From 9f00cb7700604c2b66f6b127b5d29cd77e7baa99 Mon Sep 17 00:00:00 2001 From: Stanislav Lashmanov Date: Thu, 19 Jan 2023 12:34:44 +0400 Subject: [PATCH 01/19] Blame page streaming HTML Streaming is now used on the full Blame page view. How streaming works: https://gitlab.com/gitlab-org/frontend/rfcs/-/issues/101 Changelog: added --- .../javascripts/blame/streaming/index.js | 25 +++++++++ .../pages/projects/blame/show/index.js | 2 + .../streaming/backpressure_buffer.js | 23 ++++++++ .../streaming/handle_streamed_anchor_link.js | 34 ++++++++++++ .../javascripts/streaming/render_stream.js | 53 +++++++++++++++++++ app/controllers/projects/blame_controller.rb | 16 ++++++ app/services/projects/blame_service.rb | 6 ++- app/views/layouts/_head.html.haml | 1 + app/views/projects/blame/show.html.haml | 15 ++++++ config/routes/repository.rb | 1 + 10 files changed, 175 insertions(+), 1 deletion(-) create mode 100644 app/assets/javascripts/blame/streaming/index.js create mode 100644 app/assets/javascripts/streaming/backpressure_buffer.js create mode 100644 app/assets/javascripts/streaming/handle_streamed_anchor_link.js create mode 100644 app/assets/javascripts/streaming/render_stream.js diff --git a/app/assets/javascripts/blame/streaming/index.js b/app/assets/javascripts/blame/streaming/index.js new file mode 100644 index 00000000000000..7663fea267839a --- /dev/null +++ b/app/assets/javascripts/blame/streaming/index.js @@ -0,0 +1,25 @@ +import { renderStream } from '~/streaming/render_stream'; +import { handleStreamedAnchorLink } from '~/streaming/handle_streamed_anchor_link'; +import { createAlert } from '~/flash'; +import { __ } from '~/locale'; +import Sentry from '~/sentry'; + +export async function renderBlamePageStreams(streamPromises) { + const element = document.querySelector('#blame-stream-container'); + + if (!element) return; + + const stopAnchorObserver = handleStreamedAnchorLink(element); + + const streams = await Promise.all(streamPromises); + + try { + for await (const stream of streams) { + await renderStream(stream, element); + } + stopAnchorObserver(); + } catch (error) { + Sentry.captureException(error); + createAlert({ message: __('Failed to fully load the Blame page. 
Try to reload the page.') }); + } +} diff --git a/app/assets/javascripts/pages/projects/blame/show/index.js b/app/assets/javascripts/pages/projects/blame/show/index.js index 1e4b9de90f2677..013dd959a85beb 100644 --- a/app/assets/javascripts/pages/projects/blame/show/index.js +++ b/app/assets/javascripts/pages/projects/blame/show/index.js @@ -1,5 +1,7 @@ import initBlob from '~/pages/projects/init_blob'; import redirectToCorrectPage from '~/blame/blame_redirect'; +import { renderBlamePageStreams } from '~/blame/streaming'; redirectToCorrectPage(); +renderBlamePageStreams(window.streams); initBlob(); diff --git a/app/assets/javascripts/streaming/backpressure_buffer.js b/app/assets/javascripts/streaming/backpressure_buffer.js new file mode 100644 index 00000000000000..3a6e5e01aba06c --- /dev/null +++ b/app/assets/javascripts/streaming/backpressure_buffer.js @@ -0,0 +1,23 @@ +export class BackpressureBuffer { + highWaterMark = 30000; + #bufferLength = 0; + #buffer = []; + + constructor(onOverflow) { + this.onOverflow = onOverflow; + } + + push(string) { + if (this.#bufferLength >= this.highWaterMark) { + this.release(); + } + this.#buffer.push(string); + this.#bufferLength += string.length; + } + + release() { + this.onOverflow(this.#buffer); + this.#buffer = []; + this.#bufferLength = 0; + } +} diff --git a/app/assets/javascripts/streaming/handle_streamed_anchor_link.js b/app/assets/javascripts/streaming/handle_streamed_anchor_link.js new file mode 100644 index 00000000000000..19970832ad9b0f --- /dev/null +++ b/app/assets/javascripts/streaming/handle_streamed_anchor_link.js @@ -0,0 +1,34 @@ +import { scrollToElement } from '~/lib/utils/common_utils'; + +const noop = () => {}; + +export function handleStreamedAnchorLink(rootElement) { + const anchor = window.location.hash; + if (!anchor) return noop; + if (anchor === '#') { + const newURL = new URL(window.location); + newURL.hash = undefined; + window.history.replaceState(window.history.state, null, newURL); + return noop; + } + + const element = document.querySelector(anchor); + if (element) return noop; + + let observer = new MutationObserver((mutationList, instance) => { + mutationList.forEach((mutation) => { + if (mutation.type !== 'childList') return; + mutation.addedNodes.forEach((node) => { + const target = node.querySelector(anchor); + if (!target) return; + scrollToElement(target); + instance.disconnect(); + observer = null; + }); + }); + }); + + observer.observe(rootElement, { childList: true, subtree: true }); + + return () => observer?.disconnect(); +} diff --git a/app/assets/javascripts/streaming/render_stream.js b/app/assets/javascripts/streaming/render_stream.js new file mode 100644 index 00000000000000..448e57f3801b20 --- /dev/null +++ b/app/assets/javascripts/streaming/render_stream.js @@ -0,0 +1,53 @@ +import { BackpressureBuffer } from '~/streaming/backpressure_buffer'; + +export function renderStream(stream, element) { + const reader = stream.getReader(); + const decoder = new TextDecoder('utf-8'); + + const streamDocument = document.implementation.createHTMLDocument('stream'); + + streamDocument.open(); + streamDocument.write(''); + + const virtualStreamingElement = streamDocument.querySelector('streaming-element'); + element.appendChild(virtualStreamingElement); + + // we need a backpressure mitigation mechanism here + // because inserting very small chunks of html (20-50 chars) causes layout thrashing + // and worsens streaming overhead + const backpressure = new BackpressureBuffer((buffer) => { + // 
eslint-disable-next-line no-unsanitized/method + streamDocument.write(buffer.join('')); + }); + + let res; + const result = new Promise((resolve) => { + res = resolve; + }); + + reader + .read() + .then(function processText({ done, value }) { + if (done) { + backpressure.release(); + streamDocument.write(''); + streamDocument.close(); + res(); + return; + } + + backpressure.push(decoder.decode(value).normalize('NFC')); + + reader + .read() + .then(processText) + .catch((error) => { + throw error; + }); + }) + .catch((error) => { + throw error; + }); + + return result; +} diff --git a/app/controllers/projects/blame_controller.rb b/app/controllers/projects/blame_controller.rb index cfff281604e5dd..6e3d53ebd85090 100644 --- a/app/controllers/projects/blame_controller.rb +++ b/app/controllers/projects/blame_controller.rb @@ -30,6 +30,22 @@ def show @blame_pagination = blame_service.pagination @blame_per_page = blame_service.per_page + + render locals: { total_extra_pages: blame_service.total_extra_pages } + end + + def page + @blob = @repository.blob_at(@commit.id, @path) + + environment_params = @repository.branch_exists?(@ref) ? { ref: @ref } : { commit: @commit } + environment_params[:find_latest] = true + @environment = ::Environments::EnvironmentsByDeploymentsFinder.new(@project, current_user, environment_params).execute.last + + blame_service = Projects::BlameService.new(@blob, @commit, params.permit(:page)) + + @blame = Gitlab::View::Presenter::Factory.new(blame_service.blame, project: @project, path: @path, page: blame_service.page).fabricate! + + render partial: 'page' end end diff --git a/app/services/projects/blame_service.rb b/app/services/projects/blame_service.rb index 58e146e5a32b10..b96372ceec822e 100644 --- a/app/services/projects/blame_service.rb +++ b/app/services/projects/blame_service.rb @@ -4,7 +4,7 @@ # objects module Projects class BlameService - PER_PAGE = 1000 + PER_PAGE = 2000 def initialize(blob, commit, params) @blob = blob @@ -31,6 +31,10 @@ def per_page PER_PAGE end + def total_extra_pages + (blob_lines_count / per_page).ceil + end + private attr_reader :blob, :commit, :pagination_enabled diff --git a/app/views/layouts/_head.html.haml b/app/views/layouts/_head.html.haml index dd441d0d1556b7..f0c1b0901403dd 100644 --- a/app/views/layouts/_head.html.haml +++ b/app/views/layouts/_head.html.haml @@ -10,6 +10,7 @@ %meta{ 'http-equiv' => 'X-UA-Compatible', content: 'IE=edge' } = render 'layouts/startup_js' + = yield :startup_js - if page_canonical_link %link{ rel: 'canonical', href: page_canonical_link } diff --git a/app/views/projects/blame/show.html.haml b/app/views/projects/blame/show.html.haml index 74b85a93c8ee7f..6c9e9a18f79593 100644 --- a/app/views/projects/blame/show.html.haml +++ b/app/views/projects/blame/show.html.haml @@ -1,6 +1,19 @@ - page_title _("Blame"), @blob.path, @ref - add_page_specific_style 'page_bundles/tree' - dataset = { testid: 'blob-content-holder', qa_selector: 'blame_file_content', per_page: @blame_per_page } +- if total_extra_pages != 0 + - content_for :startup_js do + = javascript_tag do + :plain + window.streams = Array.from( + { length: #{total_extra_pages} }, + (v, i) => { + const url = new URL(window.location); + url.pathname = url.pathname.replace('-/blame/', '-/blame_page/'); + url.searchParams.set('page', i + 2); + return fetch(url).then(response => response.body); + }, + ); #blob-content-holder.tree-holder.js-per-page{ data: dataset } = render "projects/blob/breadcrumb", blob: @blob, blame: true @@ -34,3 +47,5 @@ - if 
@blame_pagination = paginate(@blame_pagination, theme: "gitlab") + + #blame-stream-container diff --git a/config/routes/repository.rb b/config/routes/repository.rb index 0202eb80b23272..60d3d37bdc85ae 100644 --- a/config/routes/repository.rb +++ b/config/routes/repository.rb @@ -75,6 +75,7 @@ get '/tree/*id', to: 'tree#show', as: :tree get '/raw/*id', to: 'raw#show', as: :raw + get '/blame_page/*id', to: 'blame#page', as: :blame_page get '/blame/*id', to: 'blame#show', as: :blame get '/commits', to: 'commits#commits_root', as: :commits_root -- GitLab From e46ba87986508089a582460123acd943d4faf310 Mon Sep 17 00:00:00 2001 From: Stanislav Lashmanov Date: Mon, 23 Jan 2023 17:56:47 +0400 Subject: [PATCH 02/19] Use WriteableStream to handle DOM writing pressure --- .../javascripts/blame/streaming/index.js | 9 ++-- .../streaming/backpressure_buffer.js | 23 -------- .../javascripts/streaming/render_stream.js | 53 ------------------- .../javascripts/streaming/render_streams.js | 52 ++++++++++++++++++ 4 files changed, 56 insertions(+), 81 deletions(-) delete mode 100644 app/assets/javascripts/streaming/backpressure_buffer.js delete mode 100644 app/assets/javascripts/streaming/render_stream.js create mode 100644 app/assets/javascripts/streaming/render_streams.js diff --git a/app/assets/javascripts/blame/streaming/index.js b/app/assets/javascripts/blame/streaming/index.js index 7663fea267839a..a186d6b8d07311 100644 --- a/app/assets/javascripts/blame/streaming/index.js +++ b/app/assets/javascripts/blame/streaming/index.js @@ -1,4 +1,4 @@ -import { renderStream } from '~/streaming/render_stream'; +import { renderStreams } from '~/streaming/render_streams'; import { handleStreamedAnchorLink } from '~/streaming/handle_streamed_anchor_link'; import { createAlert } from '~/flash'; import { __ } from '~/locale'; @@ -14,12 +14,11 @@ export async function renderBlamePageStreams(streamPromises) { const streams = await Promise.all(streamPromises); try { - for await (const stream of streams) { - await renderStream(stream, element); - } - stopAnchorObserver(); + await renderStreams(streams, element); } catch (error) { Sentry.captureException(error); createAlert({ message: __('Failed to fully load the Blame page. 
Try to reload the page.') }); + } finally { + stopAnchorObserver(); } } diff --git a/app/assets/javascripts/streaming/backpressure_buffer.js b/app/assets/javascripts/streaming/backpressure_buffer.js deleted file mode 100644 index 3a6e5e01aba06c..00000000000000 --- a/app/assets/javascripts/streaming/backpressure_buffer.js +++ /dev/null @@ -1,23 +0,0 @@ -export class BackpressureBuffer { - highWaterMark = 30000; - #bufferLength = 0; - #buffer = []; - - constructor(onOverflow) { - this.onOverflow = onOverflow; - } - - push(string) { - if (this.#bufferLength >= this.highWaterMark) { - this.release(); - } - this.#buffer.push(string); - this.#bufferLength += string.length; - } - - release() { - this.onOverflow(this.#buffer); - this.#buffer = []; - this.#bufferLength = 0; - } -} diff --git a/app/assets/javascripts/streaming/render_stream.js b/app/assets/javascripts/streaming/render_stream.js deleted file mode 100644 index 448e57f3801b20..00000000000000 --- a/app/assets/javascripts/streaming/render_stream.js +++ /dev/null @@ -1,53 +0,0 @@ -import { BackpressureBuffer } from '~/streaming/backpressure_buffer'; - -export function renderStream(stream, element) { - const reader = stream.getReader(); - const decoder = new TextDecoder('utf-8'); - - const streamDocument = document.implementation.createHTMLDocument('stream'); - - streamDocument.open(); - streamDocument.write(''); - - const virtualStreamingElement = streamDocument.querySelector('streaming-element'); - element.appendChild(virtualStreamingElement); - - // we need a backpressure mitigation mechanism here - // because inserting very small chunks of html (20-50 chars) causes layout thrashing - // and worsens streaming overhead - const backpressure = new BackpressureBuffer((buffer) => { - // eslint-disable-next-line no-unsanitized/method - streamDocument.write(buffer.join('')); - }); - - let res; - const result = new Promise((resolve) => { - res = resolve; - }); - - reader - .read() - .then(function processText({ done, value }) { - if (done) { - backpressure.release(); - streamDocument.write(''); - streamDocument.close(); - res(); - return; - } - - backpressure.push(decoder.decode(value).normalize('NFC')); - - reader - .read() - .then(processText) - .catch((error) => { - throw error; - }); - }) - .catch((error) => { - throw error; - }); - - return result; -} diff --git a/app/assets/javascripts/streaming/render_streams.js b/app/assets/javascripts/streaming/render_streams.js new file mode 100644 index 00000000000000..48a21dbf3692ae --- /dev/null +++ b/app/assets/javascripts/streaming/render_streams.js @@ -0,0 +1,52 @@ +/* eslint-disable no-unsanitized/method */ + +export async function renderStreams(streams, element) { + const decoder = new TextDecoder('utf-8'); + + const streamDocument = document.implementation.createHTMLDocument('stream'); + + streamDocument.open(); + streamDocument.write(''); + + const virtualStreamingElement = streamDocument.querySelector('streaming-element'); + element.appendChild(virtualStreamingElement); + + let res; + let rej; + const result = new Promise((resolve, reject) => { + res = resolve; + rej = reject; + }); + + const queuingStrategy = new ByteLengthQueuingStrategy({ highWaterMark: 100 * 1024 }); + const domWriter = new WritableStream( + { + write(chunk) { + const decodedChunk = decoder.decode(chunk).normalize('NFC'); + return new Promise((resolve) => { + requestAnimationFrame(() => { + streamDocument.write(decodedChunk); + resolve(); + }); + }); + }, + close() { + streamDocument.close(); + res(); + }, + 
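+      // abort() is invoked when an upstream pipeTo() rejects mid-stream;
+      // closing the stream document finalizes the partially written DOM
+      // before the wrapping promise is rejected for the caller to handle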
abort(error) { + streamDocument.close(); + rej(error); + }, + }, + queuingStrategy, + ); + + for (const stream of streams.slice(0, -1)) { + // eslint-disable-next-line no-await-in-loop + await stream.pipeTo(domWriter, { preventClose: true }); + } + await streams[streams.length - 1].pipeTo(domWriter); + + return result; +} -- GitLab From 630a09b83e882b9c4a5a6189fd13d8d8cb425ab2 Mon Sep 17 00:00:00 2001 From: Stanislav Lashmanov Date: Tue, 24 Jan 2023 02:15:08 +0400 Subject: [PATCH 03/19] Properly handle anchor links --- .../streaming/handle_streamed_anchor_link.js | 31 +++++++------------ 1 file changed, 11 insertions(+), 20 deletions(-) diff --git a/app/assets/javascripts/streaming/handle_streamed_anchor_link.js b/app/assets/javascripts/streaming/handle_streamed_anchor_link.js index 19970832ad9b0f..bbdb08181760e9 100644 --- a/app/assets/javascripts/streaming/handle_streamed_anchor_link.js +++ b/app/assets/javascripts/streaming/handle_streamed_anchor_link.js @@ -1,34 +1,25 @@ +import { throttle } from 'lodash'; import { scrollToElement } from '~/lib/utils/common_utils'; const noop = () => {}; export function handleStreamedAnchorLink(rootElement) { const anchor = window.location.hash; - if (!anchor) return noop; - if (anchor === '#') { - const newURL = new URL(window.location); - newURL.hash = undefined; - window.history.replaceState(window.history.state, null, newURL); - return noop; - } + if (!anchor || anchor === '#') return noop; const element = document.querySelector(anchor); if (element) return noop; - let observer = new MutationObserver((mutationList, instance) => { - mutationList.forEach((mutation) => { - if (mutation.type !== 'childList') return; - mutation.addedNodes.forEach((node) => { - const target = node.querySelector(anchor); - if (!target) return; - scrollToElement(target); - instance.disconnect(); - observer = null; - }); - }); - }); + const handler = throttle((mutationList, instance) => { + const target = document.querySelector(anchor); + if (!target) return; + scrollToElement(target); + instance.disconnect(); + }, 300); + + const observer = new MutationObserver(handler); observer.observe(rootElement, { childList: true, subtree: true }); - return () => observer?.disconnect(); + return () => observer.disconnect(); } -- GitLab From 608a159f06103cd77fad5d2f9d13eae3d5c1aba5 Mon Sep 17 00:00:00 2001 From: Stanislav Lashmanov Date: Thu, 26 Jan 2023 14:32:21 +0400 Subject: [PATCH 04/19] Properly handle reflow backpressure when streaming --- .../javascripts/blame/streaming/index.js | 7 +- .../streaming/render_html_streams.js | 74 +++++++++++++++++++ .../javascripts/streaming/render_streams.js | 52 ------------- 3 files changed, 77 insertions(+), 56 deletions(-) create mode 100644 app/assets/javascripts/streaming/render_html_streams.js delete mode 100644 app/assets/javascripts/streaming/render_streams.js diff --git a/app/assets/javascripts/blame/streaming/index.js b/app/assets/javascripts/blame/streaming/index.js index a186d6b8d07311..664dc2fe2c3bda 100644 --- a/app/assets/javascripts/blame/streaming/index.js +++ b/app/assets/javascripts/blame/streaming/index.js @@ -1,4 +1,4 @@ -import { renderStreams } from '~/streaming/render_streams'; +import { renderHtmlStreams } from '~/streaming/render_html_streams'; import { handleStreamedAnchorLink } from '~/streaming/handle_streamed_anchor_link'; import { createAlert } from '~/flash'; import { __ } from '~/locale'; @@ -11,10 +11,9 @@ export async function renderBlamePageStreams(streamPromises) { const stopAnchorObserver = 
handleStreamedAnchorLink(element); - const streams = await Promise.all(streamPromises); - try { - await renderStreams(streams, element); + const streams = await Promise.all(streamPromises); + await renderHtmlStreams(streams, element); } catch (error) { Sentry.captureException(error); createAlert({ message: __('Failed to fully load the Blame page. Try to reload the page.') }); diff --git a/app/assets/javascripts/streaming/render_html_streams.js b/app/assets/javascripts/streaming/render_html_streams.js new file mode 100644 index 00000000000000..2144a9fc037532 --- /dev/null +++ b/app/assets/javascripts/streaming/render_html_streams.js @@ -0,0 +1,74 @@ +/* eslint-disable no-unsanitized/method */ + +const MIN_CHUNK_SIZE = 32 * 1024; +const MAX_CHUNK_SIZE = 2048 * 1024; + +export async function renderHtmlStreams(streams, element) { + const decoder = new TextDecoder('utf-8'); + + const streamDocument = document.implementation.createHTMLDocument('stream'); + + streamDocument.open(); + streamDocument.write(''); + + const virtualStreamingElement = streamDocument.querySelector('streaming-element'); + element.appendChild(virtualStreamingElement); + + let res; + let rej; + const result = new Promise((resolve, reject) => { + res = resolve; + rej = reject; + }); + + const domWriter = new WritableStream({ + write(chunk) { + let resWrite; + const promise = new Promise((resolve) => { + resWrite = resolve; + }); + let cursor = 0; + let size = MIN_CHUNK_SIZE; + let previousTimestamp; + const render = (timestamp) => { + if (cursor >= chunk.byteLength) { + resWrite(); + return; + } + const decoded = decoder.decode(chunk.subarray(cursor, cursor + size)); + streamDocument.write(decoded); + cursor += size; + const duration = Math.ceil(timestamp - previousTimestamp); + // We have to keep a tight balance between the chunk size and frame rendering time + // Small chunks lead to long completion times + // Large chunks lead to long blocking times + if (duration > 128) { + size = Math.max(size / 2, MIN_CHUNK_SIZE); + // β‰ˆ15FPS target + } else if (duration < 64) { + size = Math.min(size * 2, MAX_CHUNK_SIZE); + } + previousTimestamp = timestamp; + requestAnimationFrame(render); + }; + requestAnimationFrame(render); + return promise; + }, + close() { + streamDocument.close(); + res(); + }, + abort(error) { + streamDocument.close(); + rej(error); + }, + }); + + for (const stream of streams.slice(0, -1)) { + // eslint-disable-next-line no-await-in-loop + await stream.pipeTo(domWriter, { preventClose: true }); + } + await streams[streams.length - 1].pipeTo(domWriter); + + return result; +} diff --git a/app/assets/javascripts/streaming/render_streams.js b/app/assets/javascripts/streaming/render_streams.js deleted file mode 100644 index 48a21dbf3692ae..00000000000000 --- a/app/assets/javascripts/streaming/render_streams.js +++ /dev/null @@ -1,52 +0,0 @@ -/* eslint-disable no-unsanitized/method */ - -export async function renderStreams(streams, element) { - const decoder = new TextDecoder('utf-8'); - - const streamDocument = document.implementation.createHTMLDocument('stream'); - - streamDocument.open(); - streamDocument.write(''); - - const virtualStreamingElement = streamDocument.querySelector('streaming-element'); - element.appendChild(virtualStreamingElement); - - let res; - let rej; - const result = new Promise((resolve, reject) => { - res = resolve; - rej = reject; - }); - - const queuingStrategy = new ByteLengthQueuingStrategy({ highWaterMark: 100 * 1024 }); - const domWriter = new WritableStream( - { - write(chunk) { - 
const decodedChunk = decoder.decode(chunk).normalize('NFC'); - return new Promise((resolve) => { - requestAnimationFrame(() => { - streamDocument.write(decodedChunk); - resolve(); - }); - }); - }, - close() { - streamDocument.close(); - res(); - }, - abort(error) { - streamDocument.close(); - rej(error); - }, - }, - queuingStrategy, - ); - - for (const stream of streams.slice(0, -1)) { - // eslint-disable-next-line no-await-in-loop - await stream.pipeTo(domWriter, { preventClose: true }); - } - await streams[streams.length - 1].pipeTo(domWriter); - - return result; -} -- GitLab From efdb625073822252b748d5cb7b01369236f1a8e4 Mon Sep 17 00:00:00 2001 From: Stanislav Lashmanov Date: Thu, 26 Jan 2023 16:14:05 +0400 Subject: [PATCH 05/19] Add Blame page streaming with a feature flag --- .../javascripts/blame/blame_redirect.js | 1 + .../javascripts/blame/streaming/index.js | 8 +- .../pages/projects/blame/show/index.js | 2 +- .../streaming/handle_streamed_anchor_link.js | 3 + .../javascripts/streaming/html_stream.js | 22 ++++++ .../streaming/render_html_streams.js | 77 ++++++++---------- app/assets/stylesheets/framework/files.scss | 28 +++++++ app/controllers/projects/blame_controller.rb | 19 ++++- app/services/projects/blame_service.rb | 16 +++- app/views/projects/blame/show.html.haml | 13 ++- .../development/blame_page_streaming.yml | 8 ++ locale/gitlab.pot | 9 ++- .../streaming/render_html_streams_spec.js | 79 +++++++++++++++++++ spec/frontend/test_setup.js | 4 + 14 files changed, 232 insertions(+), 57 deletions(-) create mode 100644 app/assets/javascripts/streaming/html_stream.js create mode 100644 config/feature_flags/development/blame_page_streaming.yml create mode 100644 spec/frontend/streaming/render_html_streams_spec.js diff --git a/app/assets/javascripts/blame/blame_redirect.js b/app/assets/javascripts/blame/blame_redirect.js index 155e2a3a2cdc49..f63bbca2dae502 100644 --- a/app/assets/javascripts/blame/blame_redirect.js +++ b/app/assets/javascripts/blame/blame_redirect.js @@ -3,6 +3,7 @@ import { createAlert } from '~/flash'; import { __ } from '~/locale'; export default function redirectToCorrectBlamePage() { + if (new URLSearchParams(window.location.search).get('streaming')) return; const { hash } = window.location; const linesPerPage = parseInt(document.querySelector('.js-per-page').dataset.perPage, 10); const params = new URLSearchParams(window.location.search); diff --git a/app/assets/javascripts/blame/streaming/index.js b/app/assets/javascripts/blame/streaming/index.js index 664dc2fe2c3bda..b2f23eae9b4d23 100644 --- a/app/assets/javascripts/blame/streaming/index.js +++ b/app/assets/javascripts/blame/streaming/index.js @@ -7,17 +7,17 @@ import Sentry from '~/sentry'; export async function renderBlamePageStreams(streamPromises) { const element = document.querySelector('#blame-stream-container'); - if (!element) return; + if (!element || !streamPromises) return; const stopAnchorObserver = handleStreamedAnchorLink(element); try { - const streams = await Promise.all(streamPromises); - await renderHtmlStreams(streams, element); + await renderHtmlStreams(streamPromises, element); } catch (error) { - Sentry.captureException(error); createAlert({ message: __('Failed to fully load the Blame page. 
Try to reload the page.') }); + Sentry.captureException(error); } finally { stopAnchorObserver(); + document.querySelector('#blame-stream-loading').remove(); } } diff --git a/app/assets/javascripts/pages/projects/blame/show/index.js b/app/assets/javascripts/pages/projects/blame/show/index.js index 013dd959a85beb..d45144ba84aa90 100644 --- a/app/assets/javascripts/pages/projects/blame/show/index.js +++ b/app/assets/javascripts/pages/projects/blame/show/index.js @@ -3,5 +3,5 @@ import redirectToCorrectPage from '~/blame/blame_redirect'; import { renderBlamePageStreams } from '~/blame/streaming'; redirectToCorrectPage(); -renderBlamePageStreams(window.streams); +renderBlamePageStreams(window.blamePageStreams); initBlob(); diff --git a/app/assets/javascripts/streaming/handle_streamed_anchor_link.js b/app/assets/javascripts/streaming/handle_streamed_anchor_link.js index bbdb08181760e9..1deec7256584ae 100644 --- a/app/assets/javascripts/streaming/handle_streamed_anchor_link.js +++ b/app/assets/javascripts/streaming/handle_streamed_anchor_link.js @@ -1,5 +1,6 @@ import { throttle } from 'lodash'; import { scrollToElement } from '~/lib/utils/common_utils'; +import LineHighlighter from '~/blob/line_highlighter'; const noop = () => {}; @@ -14,6 +15,8 @@ export function handleStreamedAnchorLink(rootElement) { const target = document.querySelector(anchor); if (!target) return; scrollToElement(target); + // eslint-disable-next-line no-new + new LineHighlighter(); instance.disconnect(); }, 300); diff --git a/app/assets/javascripts/streaming/html_stream.js b/app/assets/javascripts/streaming/html_stream.js new file mode 100644 index 00000000000000..331c3850cd8b99 --- /dev/null +++ b/app/assets/javascripts/streaming/html_stream.js @@ -0,0 +1,22 @@ +export class HtmlStream { + constructor(element) { + const streamDocument = document.implementation.createHTMLDocument('stream'); + + streamDocument.open(); + streamDocument.write(''); + + const virtualStreamingElement = streamDocument.querySelector('streaming-element'); + element.appendChild(virtualStreamingElement); + + this.streamDocument = streamDocument; + } + + write(chunk) { + // eslint-disable-next-line no-unsanitized/method + this.streamDocument.write(chunk); + } + + close() { + this.streamDocument.close(); + } +} diff --git a/app/assets/javascripts/streaming/render_html_streams.js b/app/assets/javascripts/streaming/render_html_streams.js index 2144a9fc037532..1ce2e83fdeebdf 100644 --- a/app/assets/javascripts/streaming/render_html_streams.js +++ b/app/assets/javascripts/streaming/render_html_streams.js @@ -1,18 +1,10 @@ -/* eslint-disable no-unsanitized/method */ +import { HtmlStream } from '~/streaming/html_stream'; const MIN_CHUNK_SIZE = 32 * 1024; const MAX_CHUNK_SIZE = 2048 * 1024; -export async function renderHtmlStreams(streams, element) { - const decoder = new TextDecoder('utf-8'); - - const streamDocument = document.implementation.createHTMLDocument('stream'); - - streamDocument.open(); - streamDocument.write(''); - - const virtualStreamingElement = streamDocument.querySelector('streaming-element'); - element.appendChild(virtualStreamingElement); +export async function renderHtmlStreams(streamPromises, element) { + const htmlStream = new HtmlStream(element); let res; let rej; @@ -21,54 +13,55 @@ export async function renderHtmlStreams(streams, element) { rej = reject; }); + const decoder = new TextDecoder('utf-8'); const domWriter = new WritableStream({ write(chunk) { - let resWrite; - const promise = new Promise((resolve) => { - resWrite = 
resolve; - }); let cursor = 0; let size = MIN_CHUNK_SIZE; let previousTimestamp; - const render = (timestamp) => { - if (cursor >= chunk.byteLength) { - resWrite(); - return; - } - const decoded = decoder.decode(chunk.subarray(cursor, cursor + size)); - streamDocument.write(decoded); - cursor += size; - const duration = Math.ceil(timestamp - previousTimestamp); - // We have to keep a tight balance between the chunk size and frame rendering time - // Small chunks lead to long completion times - // Large chunks lead to long blocking times - if (duration > 128) { - size = Math.max(size / 2, MIN_CHUNK_SIZE); - // β‰ˆ15FPS target - } else if (duration < 64) { - size = Math.min(size * 2, MAX_CHUNK_SIZE); - } - previousTimestamp = timestamp; + return new Promise((resolve) => { + const render = (timestamp) => { + if (cursor >= chunk.byteLength) { + resolve(); + return; + } + + const decoded = decoder.decode(chunk.subarray(cursor, cursor + size), { stream: true }); + htmlStream.write(decoded); + cursor += size; + + const duration = Math.ceil(timestamp - previousTimestamp); + // We have to keep a tight balance between the chunk size and frame rendering time + // Small chunks lead to long completion times + // Large chunks lead to long blocking times + if (duration > 128) { + size = Math.max(size / 2, MIN_CHUNK_SIZE); + // β‰ˆ15FPS target + } else if (duration < 64) { + size = Math.min(size * 2, MAX_CHUNK_SIZE); + } + previousTimestamp = timestamp; + + requestAnimationFrame(render); + }; requestAnimationFrame(render); - }; - requestAnimationFrame(render); - return promise; + }); }, close() { - streamDocument.close(); + htmlStream.close(); res(); }, abort(error) { - streamDocument.close(); + htmlStream.close(); rej(error); }, }); - for (const stream of streams.slice(0, -1)) { - // eslint-disable-next-line no-await-in-loop + for await (const stream of streamPromises.slice(0, -1)) { await stream.pipeTo(domWriter, { preventClose: true }); } - await streams[streams.length - 1].pipeTo(domWriter); + const stream = await streamPromises[streamPromises.length - 1]; + await stream.pipeTo(domWriter); return result; } diff --git a/app/assets/stylesheets/framework/files.scss b/app/assets/stylesheets/framework/files.scss index 9ea5a66b3bc4ce..12845effbce2b3 100644 --- a/app/assets/stylesheets/framework/files.scss +++ b/app/assets/stylesheets/framework/files.scss @@ -580,3 +580,31 @@ span.idiff { padding: 0; border-radius: 0 0 $border-radius-default $border-radius-default; } + +.blame-stream-container { + border-top: 1px solid $gray-darker; +} + +.blame-stream-loading { + $gradient-size: 16px; + position: sticky; + bottom: 0; + display: flex; + justify-content: center; + align-items: center; + margin-top: -$gradient-size; + height: $gl-spacing-scale-10; + border-top: $gradient-size solid transparent; + background-color: $white; + box-sizing: content-box; + background-clip: content-box; + + .gradient { + position: absolute; + left: 0; + right: 0; + top: -$gradient-size; + height: $gradient-size; + background: linear-gradient(to top, $white, transparentize($white, 1)); + } +} diff --git a/app/controllers/projects/blame_controller.rb b/app/controllers/projects/blame_controller.rb index 6e3d53ebd85090..dad3602388bde2 100644 --- a/app/controllers/projects/blame_controller.rb +++ b/app/controllers/projects/blame_controller.rb @@ -23,11 +23,18 @@ def show environment_params[:find_latest] = true @environment = ::Environments::EnvironmentsByDeploymentsFinder.new(@project, current_user, environment_params).execute.last - 
blame_service = Projects::BlameService.new(@blob, @commit, params.permit(:page, :no_pagination)) + permitted_params = params.permit(:page, :no_pagination, :streaming) + blame_service = Projects::BlameService.new(@blob, @commit, permitted_params) @blame = Gitlab::View::Presenter::Factory.new(blame_service.blame, project: @project, path: @path, page: blame_service.page).fabricate! - @blame_pagination = blame_service.pagination + @entire_blame_path = full_blame_path(no_pagination: true) + if blame_service.streaming_possible + @entire_blame_path = full_blame_path(streaming: true) + end + + @streaming_enabled = blame_service.streaming_state(permitted_params) + @blame_pagination = blame_service.pagination unless @streaming_enabled @blame_per_page = blame_service.per_page @@ -41,12 +48,18 @@ def page environment_params[:find_latest] = true @environment = ::Environments::EnvironmentsByDeploymentsFinder.new(@project, current_user, environment_params).execute.last - blame_service = Projects::BlameService.new(@blob, @commit, params.permit(:page)) + blame_service = Projects::BlameService.new(@blob, @commit, params.permit(:page, :streaming)) @blame = Gitlab::View::Presenter::Factory.new(blame_service.blame, project: @project, path: @path, page: blame_service.page).fabricate! render partial: 'page' end + + private + + def full_blame_path(params) + namespace_project_blame_path(namespace_id: @project.namespace, project_id: @project, id: @id, **params) + end end Projects::BlameController.prepend_mod diff --git a/app/services/projects/blame_service.rb b/app/services/projects/blame_service.rb index b96372ceec822e..12e8f80acbbac2 100644 --- a/app/services/projects/blame_service.rb +++ b/app/services/projects/blame_service.rb @@ -4,13 +4,15 @@ # objects module Projects class BlameService - PER_PAGE = 2000 + PER_PAGE = 1000 + PER_PAGE_STREAMING = 2000 def initialize(blob, commit, params) @blob = blob @commit = commit @page = extract_page(params) @pagination_enabled = pagination_state(params) + @streaming_enabled = streaming_state(params) end attr_reader :page @@ -28,6 +30,8 @@ def pagination end def per_page + return PER_PAGE_STREAMING if @streaming_enabled + PER_PAGE end @@ -35,6 +39,16 @@ def total_extra_pages (blob_lines_count / per_page).ceil end + def streaming_possible + Feature.enabled?(:blame_page_streaming, commit.project) + end + + def streaming_state(params) + return false unless streaming_possible + + Gitlab::Utils.to_boolean(params[:streaming], default: false) + end + private attr_reader :blob, :commit, :pagination_enabled diff --git a/app/views/projects/blame/show.html.haml b/app/views/projects/blame/show.html.haml index 6c9e9a18f79593..3f084d02eb489e 100644 --- a/app/views/projects/blame/show.html.haml +++ b/app/views/projects/blame/show.html.haml @@ -1,11 +1,11 @@ - page_title _("Blame"), @blob.path, @ref - add_page_specific_style 'page_bundles/tree' - dataset = { testid: 'blob-content-holder', qa_selector: 'blame_file_content', per_page: @blame_per_page } -- if total_extra_pages != 0 +- if @streaming_enabled - content_for :startup_js do = javascript_tag do :plain - window.streams = Array.from( + window.blamePageStreams = Array.from( { length: #{total_extra_pages} }, (v, i) => { const url = new URL(window.location); @@ -45,7 +45,14 @@ = render Pajamas::ButtonComponent.new(href: namespace_project_blame_path(namespace_id: @project.namespace, project_id: @project, id: @id, no_pagination: true), size: :small, button_options: { class: 'gl-mt-3' }) do |c| = _('View entire blame') + - if 
@streaming_enabled + #blame-stream-container.blame-stream-container + #blame-stream-loading.blame-stream-loading + .gradient + %span.gl-mx-2 + = _('Loading blame file...') + = gl_loading_icon(size: 'md') + - if @blame_pagination = paginate(@blame_pagination, theme: "gitlab") - #blame-stream-container diff --git a/config/feature_flags/development/blame_page_streaming.yml b/config/feature_flags/development/blame_page_streaming.yml new file mode 100644 index 00000000000000..982278c4d3a2d5 --- /dev/null +++ b/config/feature_flags/development/blame_page_streaming.yml @@ -0,0 +1,8 @@ +--- +name: blame_page_streaming +introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/110208 +rollout_issue_url: +milestone: '15.9' +type: development +group: group::source code +default_enabled: false diff --git a/locale/gitlab.pot b/locale/gitlab.pot index 9c86b87a01a740..b317d772a47b0f 100644 --- a/locale/gitlab.pot +++ b/locale/gitlab.pot @@ -17280,6 +17280,9 @@ msgstr "" msgid "Failed to find users for %{missing}" msgstr "" +msgid "Failed to fully load the Blame page. Try to reload the page." +msgstr "" + msgid "Failed to generate export, please try again later." msgstr "" @@ -17975,9 +17978,6 @@ msgstr "" msgid "For example, the application using the token or the purpose of the token. Do not give sensitive information for the name of the token, as it will be visible to all %{resource_type} members." msgstr "" -msgid "For faster browsing, not all history is shown." -msgstr "" - msgid "For files larger than this limit, only index the file name. The file content is neither indexed nor searchable." msgstr "" @@ -25667,6 +25667,9 @@ msgstr "" msgid "Loading %{name}" msgstr "" +msgid "Loading blame file..." +msgstr "" + msgid "Loading contribution stats for group members" msgstr "" diff --git a/spec/frontend/streaming/render_html_streams_spec.js b/spec/frontend/streaming/render_html_streams_spec.js new file mode 100644 index 00000000000000..56b616732fba94 --- /dev/null +++ b/spec/frontend/streaming/render_html_streams_spec.js @@ -0,0 +1,79 @@ +import { ReadableStream } from 'node:stream/web'; +import { renderHtmlStreams } from '~/streaming/render_html_streams'; +import { HtmlStream } from '~/streaming/html_stream'; +import waitForPromises from 'helpers/wait_for_promises'; + +jest.mock('~/streaming/html_stream'); + +const firstStreamContent = 'foobar'; +const secondStreamContent = 'bazqux'; + +describe('renderHtmlStreams', () => { + let htmlAccumulator = ''; + const encoder = new TextEncoder(); + const createSingleChunkStream = (chunk) => { + return new ReadableStream({ + pull(controller) { + controller.enqueue(encoder.encode(chunk)); + controller.close(); + }, + }); + }; + + beforeEach(() => { + jest.spyOn(window, 'requestAnimationFrame').mockImplementation((cb) => cb(0)); + jest.spyOn(HtmlStream.prototype, 'write').mockImplementation((chunk) => { + htmlAccumulator += chunk; + }); + }); + + afterEach(() => { + htmlAccumulator = ''; + window.requestAnimationFrame.mockRestore(); + }); + + it('renders a single stream', async () => { + const closeSpy = jest.spyOn(HtmlStream.prototype, 'close'); + const stream = createSingleChunkStream(firstStreamContent); + + await renderHtmlStreams([Promise.resolve(stream)], document.body); + + expect(htmlAccumulator).toContain(firstStreamContent); + expect(closeSpy).toHaveBeenCalledTimes(1); + }); + + it('renders stream sequence', async () => { + const closeSpy = jest.spyOn(HtmlStream.prototype, 'close'); + const stream1 = 
createSingleChunkStream(firstStreamContent); + const stream2 = createSingleChunkStream(secondStreamContent); + + await renderHtmlStreams([Promise.resolve(stream1), Promise.resolve(stream2)], document.body); + + expect(htmlAccumulator).toContain(firstStreamContent + secondStreamContent); + expect(closeSpy).toHaveBeenCalledTimes(1); + }); + + it("doesn't wait for the whole sequence to resolve before streaming", async () => { + const closeSpy = jest.spyOn(HtmlStream.prototype, 'close'); + const stream1 = createSingleChunkStream(firstStreamContent); + const stream2 = createSingleChunkStream(secondStreamContent); + + let res; + const delayedStream = new Promise((resolve) => { + res = resolve; + }); + + renderHtmlStreams([Promise.resolve(stream1), delayedStream], document.body); + + await waitForPromises(); + + expect(htmlAccumulator).toContain(firstStreamContent); + expect(closeSpy).toHaveBeenCalledTimes(0); + + res(stream2); + await waitForPromises(); + + expect(htmlAccumulator).toContain(firstStreamContent + secondStreamContent); + expect(closeSpy).toHaveBeenCalledTimes(1); + }); +}); diff --git a/spec/frontend/test_setup.js b/spec/frontend/test_setup.js index 3fb226e5ed3cd1..2771e8673171ff 100644 --- a/spec/frontend/test_setup.js +++ b/spec/frontend/test_setup.js @@ -1,8 +1,12 @@ /* Setup for unit test environment */ // eslint-disable-next-line no-restricted-syntax import { setImmediate } from 'timers'; +import { ReadableStream, WritableStream } from 'node:stream/web'; import 'helpers/shared_test_setup'; +global.ReadableStream = ReadableStream; +global.WritableStream = WritableStream; + afterEach(() => // give Promises a bit more time so they fail the right test // eslint-disable-next-line no-restricted-syntax -- GitLab From 8b1e665ab469e11323069d2b3d5165a6e683e289 Mon Sep 17 00:00:00 2001 From: Stanislav Lashmanov Date: Fri, 27 Jan 2023 17:42:32 +0400 Subject: [PATCH 06/19] Refactor HTML streaming implementation --- .../javascripts/streaming/chunk_writer.js | 70 ++++++++++++++ app/assets/javascripts/streaming/constants.js | 5 + .../streaming/handle_streamed_anchor_link.js | 5 +- .../javascripts/streaming/html_stream.js | 1 + .../javascripts/streaming/render_balancer.js | 37 ++++++++ .../streaming/render_html_streams.js | 92 +++++++------------ app/views/projects/blame/show.html.haml | 19 ++-- .../frontend/__helpers__/shared_test_setup.js | 4 + spec/frontend/streaming/chunk_writer_spec.js | 52 +++++++++++ .../handle_streamed_anchor_link_spec.js | 75 +++++++++++++++ spec/frontend/streaming/html_stream_spec.js | 32 +++++++ .../streaming/render_balancer_spec.js | 71 ++++++++++++++ .../streaming/render_html_streams_spec.js | 8 ++ spec/frontend/test_setup.js | 4 - 14 files changed, 399 insertions(+), 76 deletions(-) create mode 100644 app/assets/javascripts/streaming/chunk_writer.js create mode 100644 app/assets/javascripts/streaming/constants.js create mode 100644 app/assets/javascripts/streaming/render_balancer.js create mode 100644 spec/frontend/streaming/chunk_writer_spec.js create mode 100644 spec/frontend/streaming/handle_streamed_anchor_link_spec.js create mode 100644 spec/frontend/streaming/html_stream_spec.js create mode 100644 spec/frontend/streaming/render_balancer_spec.js diff --git a/app/assets/javascripts/streaming/chunk_writer.js b/app/assets/javascripts/streaming/chunk_writer.js new file mode 100644 index 00000000000000..40753f05651d77 --- /dev/null +++ b/app/assets/javascripts/streaming/chunk_writer.js @@ -0,0 +1,70 @@ +import { RenderBalancer } from 
'~/streaming/render_balancer'; +import { + BALANCE_RATE, + HIGH_FRAME_TIME, + LOW_FRAME_TIME, + MAX_CHUNK_SIZE, + MIN_CHUNK_SIZE, +} from '~/streaming/constants'; + +function concatUint8Arrays(a, b) { + const array = new Uint8Array(a.length + b.length); + array.set(a, 0); + array.set(b, a.length); + return array; +} + +export const createChunkWriter = (write) => { + const decoder = new TextDecoder('utf-8'); + + let size = MIN_CHUNK_SIZE; + + const balancer = new RenderBalancer({ + lowFrameTime: LOW_FRAME_TIME, + highFrameTime: HIGH_FRAME_TIME, + decrease() { + size = Math.max(size / BALANCE_RATE, MIN_CHUNK_SIZE); + }, + increase() { + size = Math.min(size * BALANCE_RATE, MAX_CHUNK_SIZE); + }, + }); + + let accumulator = null; + + const handleChunk = (chunk) => { + if (accumulator) { + accumulator = concatUint8Arrays(accumulator, chunk); + } else { + accumulator = chunk; + } + + if (size > accumulator.length) return Promise.resolve(); + + let cursor = 0; + return balancer.render(() => { + const chunkPart = accumulator.subarray(cursor, cursor + size); + if (chunkPart.length < size) { + accumulator = chunkPart; + return true; + } + + const decoded = decoder.decode(chunkPart, { stream: true }); + write(decoded); + + cursor += size; + if (cursor >= accumulator.length) { + accumulator = null; + return true; + } + return false; + }); + }; + + const finalizeRendering = () => { + if (!accumulator) return; + write(decoder.decode(accumulator, { stream: true })); + }; + + return [handleChunk, finalizeRendering]; +}; diff --git a/app/assets/javascripts/streaming/constants.js b/app/assets/javascripts/streaming/constants.js new file mode 100644 index 00000000000000..fef5dc602ee87f --- /dev/null +++ b/app/assets/javascripts/streaming/constants.js @@ -0,0 +1,5 @@ +export const MIN_CHUNK_SIZE = 128 * 1024; +export const MAX_CHUNK_SIZE = 1024 * 1024; +export const LOW_FRAME_TIME = 32; +export const HIGH_FRAME_TIME = 48; +export const BALANCE_RATE = 1.5; diff --git a/app/assets/javascripts/streaming/handle_streamed_anchor_link.js b/app/assets/javascripts/streaming/handle_streamed_anchor_link.js index 1deec7256584ae..182c466eedd92e 100644 --- a/app/assets/javascripts/streaming/handle_streamed_anchor_link.js +++ b/app/assets/javascripts/streaming/handle_streamed_anchor_link.js @@ -6,10 +6,7 @@ const noop = () => {}; export function handleStreamedAnchorLink(rootElement) { const anchor = window.location.hash; - if (!anchor || anchor === '#') return noop; - - const element = document.querySelector(anchor); - if (element) return noop; + if (!anchor || anchor === '#' || document.querySelector(anchor)) return noop; const handler = throttle((mutationList, instance) => { const target = document.querySelector(anchor); diff --git a/app/assets/javascripts/streaming/html_stream.js b/app/assets/javascripts/streaming/html_stream.js index 331c3850cd8b99..d3d7ab4651d0f4 100644 --- a/app/assets/javascripts/streaming/html_stream.js +++ b/app/assets/javascripts/streaming/html_stream.js @@ -17,6 +17,7 @@ export class HtmlStream { } close() { + this.streamDocument.write(''); this.streamDocument.close(); } } diff --git a/app/assets/javascripts/streaming/render_balancer.js b/app/assets/javascripts/streaming/render_balancer.js new file mode 100644 index 00000000000000..8a862f219e7160 --- /dev/null +++ b/app/assets/javascripts/streaming/render_balancer.js @@ -0,0 +1,37 @@ +export class RenderBalancer { + previousTimestamp = undefined; + + constructor({ increase, decrease, highFrameTime, lowFrameTime }) { + this.increase = increase; + 
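+    // increase()/decrease() are supplied by the caller (chunk_writer.js)
+    // and resize its chunks; highFrameTime/lowFrameTime are the frame-duration
+    // bounds that measured frame timestamps are compared against below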
this.decrease = decrease; + this.highFrameTime = highFrameTime; + this.lowFrameTime = lowFrameTime; + } + + render(fn) { + return new Promise((resolve) => { + const callback = (timestamp) => { + const stop = fn(); + this.#throttle(timestamp); + if (!stop) requestAnimationFrame(callback); + else resolve(); + }; + requestAnimationFrame(callback); + }); + } + + #throttle(timestamp) { + const { previousTimestamp } = this; + this.previousTimestamp = timestamp; + if (!previousTimestamp) return; + + const duration = Math.round(timestamp - previousTimestamp); + if (!duration) return; + + if (duration > this.highFrameTime) { + this.decrease(); + } else if (duration < this.lowFrameTime) { + this.increase(); + } + } +} diff --git a/app/assets/javascripts/streaming/render_html_streams.js b/app/assets/javascripts/streaming/render_html_streams.js index 1ce2e83fdeebdf..cdf03934958ef5 100644 --- a/app/assets/javascripts/streaming/render_html_streams.js +++ b/app/assets/javascripts/streaming/render_html_streams.js @@ -1,67 +1,37 @@ import { HtmlStream } from '~/streaming/html_stream'; +import { createChunkWriter } from '~/streaming/chunk_writer'; -const MIN_CHUNK_SIZE = 32 * 1024; -const MAX_CHUNK_SIZE = 2048 * 1024; +export function renderHtmlStreams(streamPromises, element) { + if (streamPromises.length === 0) return Promise.resolve(); -export async function renderHtmlStreams(streamPromises, element) { const htmlStream = new HtmlStream(element); - - let res; - let rej; - const result = new Promise((resolve, reject) => { - res = resolve; - rej = reject; - }); - - const decoder = new TextDecoder('utf-8'); - const domWriter = new WritableStream({ - write(chunk) { - let cursor = 0; - let size = MIN_CHUNK_SIZE; - let previousTimestamp; - return new Promise((resolve) => { - const render = (timestamp) => { - if (cursor >= chunk.byteLength) { - resolve(); - return; - } - - const decoded = decoder.decode(chunk.subarray(cursor, cursor + size), { stream: true }); - htmlStream.write(decoded); - cursor += size; - - const duration = Math.ceil(timestamp - previousTimestamp); - // We have to keep a tight balance between the chunk size and frame rendering time - // Small chunks lead to long completion times - // Large chunks lead to long blocking times - if (duration > 128) { - size = Math.max(size / 2, MIN_CHUNK_SIZE); - // β‰ˆ15FPS target - } else if (duration < 64) { - size = Math.min(size * 2, MAX_CHUNK_SIZE); - } - previousTimestamp = timestamp; - - requestAnimationFrame(render); - }; - requestAnimationFrame(render); - }); - }, - close() { - htmlStream.close(); - res(); - }, - abort(error) { - htmlStream.close(); - rej(error); - }, + const [handleChunk, finalizeRendering] = createChunkWriter((chunk) => htmlStream.write(chunk)); + + // eslint-disable-next-line no-async-promise-executor + return new Promise(async (resolve, reject) => { + const domWriter = new WritableStream({ + write(chunk) { + return handleChunk(chunk); + }, + close() { + finalizeRendering(); + htmlStream.close(); + resolve(); + }, + abort(error) { + htmlStream.close(); + reject(error); + }, + }); + + try { + for await (const stream of streamPromises.slice(0, -1)) { + await stream.pipeTo(domWriter, { preventClose: true }); + } + const stream = await streamPromises[streamPromises.length - 1]; + await stream.pipeTo(domWriter); + } catch (error) { + domWriter.abort(error); + } }); - - for await (const stream of streamPromises.slice(0, -1)) { - await stream.pipeTo(domWriter, { preventClose: true }); - } - const stream = await 
streamPromises[streamPromises.length - 1]; - await stream.pipeTo(domWriter); - - return result; } diff --git a/app/views/projects/blame/show.html.haml b/app/views/projects/blame/show.html.haml index 3f084d02eb489e..598a164a687f0b 100644 --- a/app/views/projects/blame/show.html.haml +++ b/app/views/projects/blame/show.html.haml @@ -39,19 +39,24 @@ .blame-table-wrapper = render partial: 'page' + - if @streaming_enabled + #blame-stream-container.blame-stream-container + #blame-stream-loading.blame-stream-loading + .gradient + %span.gl-mx-2 + = _('Loading blame file...') + = gl_loading_icon(size: 'md') + - if @blame_pagination && @blame_pagination.total_pages > 1 .gl-display-flex.gl-justify-content-center.gl-flex-direction-column.gl-align-items-center.gl-p-3.gl-bg-gray-50.gl-border-t-solid.gl-border-t-1.gl-border-gray-100 = _('For faster browsing, not all history is shown.') = render Pajamas::ButtonComponent.new(href: namespace_project_blame_path(namespace_id: @project.namespace, project_id: @project, id: @id, no_pagination: true), size: :small, button_options: { class: 'gl-mt-3' }) do |c| = _('View entire blame') - - if @streaming_enabled - #blame-stream-container.blame-stream-container - #blame-stream-loading.blame-stream-loading - .gradient - %span.gl-mx-2 - = _('Loading blame file...') - = gl_loading_icon(size: 'md') + - if @blame_pagination && @blame_pagination.total_pages > 1 + .gl-display-flex.gl-justify-content-center.gl-flex-direction-column.gl-align-items-center.gl-p-3.gl-bg-gray-50.gl-border-t-solid.gl-border-t-1.gl-border-gray-100 + = render Pajamas::ButtonComponent.new(href: @entire_blame_path, size: :small, button_options: { class: 'gl-mt-3' }) do |c| + = _('View entire blame') - if @blame_pagination = paginate(@blame_pagination, theme: "gitlab") diff --git a/spec/frontend/__helpers__/shared_test_setup.js b/spec/frontend/__helpers__/shared_test_setup.js index 2fe9fe89a90ec2..7fc81cf65481e6 100644 --- a/spec/frontend/__helpers__/shared_test_setup.js +++ b/spec/frontend/__helpers__/shared_test_setup.js @@ -1,4 +1,5 @@ /* Common setup for both unit and integration test environments */ +import { ReadableStream, WritableStream } from 'node:stream/web'; import * as jqueryMatchers from 'custom-jquery-matchers'; import Vue from 'vue'; import { enableAutoDestroy } from '@vue/test-utils'; @@ -13,6 +14,9 @@ import './dom_shims'; import './jquery'; import '~/commons/bootstrap'; +global.ReadableStream = ReadableStream; +global.WritableStream = WritableStream; + enableAutoDestroy(afterEach); // This module has some fairly decent visual test coverage in it's own repository. 
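Taken together, ChunkWriter and RenderBalancer implement adaptive chunking: the writer accumulates incoming bytes until it has at least `size` bytes, one slice is written per animation frame, and the measured frame duration feeds back into the slice size. A minimal self-contained sketch of that feedback loop follows, assuming the constants.js values from this patch and a `write` callback in place of `HtmlStream#write`; `writeAdaptively` is an illustrative name, not part of the patch:

// Condensed sketch of the loop that chunk_writer.js + render_balancer.js implement.
// Constants copied from constants.js above; `write` stands in for HtmlStream#write.
const MIN_CHUNK_SIZE = 128 * 1024;
const MAX_CHUNK_SIZE = 1024 * 1024;
const LOW_FRAME_TIME = 32;
const HIGH_FRAME_TIME = 48;
const BALANCE_RATE = 1.5;

function writeAdaptively(chunk, write) {
  let size = MIN_CHUNK_SIZE;
  let cursor = 0;
  let previous;
  return new Promise((resolve) => {
    const frame = (timestamp) => {
      // one slice per animation frame keeps the main thread responsive
      write(chunk.subarray(cursor, cursor + size));
      cursor += size;
      if (previous !== undefined) {
        const duration = timestamp - previous;
        // long frame: the last slice was too large; short frame: room for more
        if (duration > HIGH_FRAME_TIME) {
          size = Math.round(Math.max(size / BALANCE_RATE, MIN_CHUNK_SIZE));
        } else if (duration < LOW_FRAME_TIME) {
          size = Math.round(Math.min(size * BALANCE_RATE, MAX_CHUNK_SIZE));
        }
      }
      previous = timestamp;
      if (cursor >= chunk.length) {
        resolve();
      } else {
        requestAnimationFrame(frame);
      }
    };
    requestAnimationFrame(frame);
  });
}

Growing the slice multiplicatively while clamping to MIN/MAX lets throughput recover quickly after a slow frame without letting any single write block the main thread for long.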
diff --git a/spec/frontend/streaming/chunk_writer_spec.js b/spec/frontend/streaming/chunk_writer_spec.js new file mode 100644 index 00000000000000..bc4ba1021ffce2 --- /dev/null +++ b/spec/frontend/streaming/chunk_writer_spec.js @@ -0,0 +1,52 @@ +import { createChunkWriter } from '~/streaming/chunk_writer'; + +jest.mock('~/streaming/constants', () => { + return { + HIGH_FRAME_TIME: 0, + LOW_FRAME_TIME: 0, + MAX_CHUNK_SIZE: 1, + MIN_CHUNK_SIZE: 1, + }; +}); + +describe('createChunkWriter', () => { + let accumulator = ''; + let write; + + const createChunk = (text) => { + const encoder = new TextEncoder(); + return encoder.encode(text); + }; + + const createWriter = () => { + write = jest.fn((part) => { + accumulator += part; + }); + return createChunkWriter(write); + }; + + beforeEach(() => { + jest.spyOn(window, 'requestAnimationFrame').mockImplementation((cb) => cb(0)); + }); + + afterEach(() => { + window.requestAnimationFrame.mockRestore(); + accumulator = ''; + }); + + it('calls write for the split chunks', () => { + const text = 'foobar'; + const [writeChunk] = createWriter(); + writeChunk(createChunk(text)); + expect(accumulator).toBe(text); + expect(write).toHaveBeenCalledTimes(6); + }); + + it('handles emoji chunks', () => { + const text = 'fooπŸ‘€barπŸ‘¨β€πŸ‘©β€πŸ‘§bazπŸ‘§πŸ‘§πŸ»πŸ‘§πŸΌπŸ‘§πŸ½πŸ‘§πŸΎπŸ‘§πŸΏ'; + const [writeChunk] = createWriter(); + writeChunk(createChunk(text)); + expect(accumulator).toBe(text); + expect(write).toHaveBeenCalledTimes(75); + }); +}); diff --git a/spec/frontend/streaming/handle_streamed_anchor_link_spec.js b/spec/frontend/streaming/handle_streamed_anchor_link_spec.js new file mode 100644 index 00000000000000..b051edad361125 --- /dev/null +++ b/spec/frontend/streaming/handle_streamed_anchor_link_spec.js @@ -0,0 +1,75 @@ +import { resetHTMLFixture, setHTMLFixture } from 'helpers/fixtures'; +import waitForPromises from 'helpers/wait_for_promises'; +import { handleStreamedAnchorLink } from '~/streaming/handle_streamed_anchor_link'; +import { scrollToElement } from '~/lib/utils/common_utils'; + +jest.mock('~/lib/utils/common_utils'); + +describe('handleStreamedAnchorLink', () => { + const anchorName = 'foo'; + const findRoot = () => document.querySelector('#root'); + + afterEach(() => { + resetHTMLFixture(); + }); + + describe('when anchor is given', () => { + beforeEach(() => { + delete window.location; + window.location = new URL(`https://www.example.com#${anchorName}`); + }); + + describe('when element is present', () => { + beforeEach(() => { + setHTMLFixture(`
<div id="root"><div id="${anchorName}"></div></div>`);
+        handleStreamedAnchorLink(findRoot());
+      });
+
+      it('does nothing', async () => {
+        await waitForPromises();
+        expect(scrollToElement).not.toHaveBeenCalled();
+      });
+    });
+
+    describe('when element is streamed', () => {
+      let stop;
+      const insertElement = () => {
+        findRoot().insertAdjacentHTML('afterbegin', `<div id="${anchorName}"></div>`);
+      };
+
+      beforeEach(() => {
+        setHTMLFixture('<div id="root"></div>');
+        stop = handleStreamedAnchorLink(findRoot());
+      });
+
+      afterEach(() => {
+        stop = undefined;
+      });
+
+      it('scrolls to the anchor when inserted', async () => {
+        insertElement();
+        await waitForPromises();
+        expect(scrollToElement).toHaveBeenCalledTimes(1);
+      });
+
+      it("doesn't scroll to the anchor when destroyed", async () => {
+        stop();
+        insertElement();
+        await waitForPromises();
+        expect(scrollToElement).not.toHaveBeenCalled();
+      });
+    });
+  });
+
+  describe('when anchor is not given', () => {
+    beforeEach(() => {
+      setHTMLFixture(`<div id="root"></div>
`); + handleStreamedAnchorLink(findRoot()); + }); + + it('does nothing', async () => { + await waitForPromises(); + expect(scrollToElement).not.toHaveBeenCalled(); + }); + }); +}); diff --git a/spec/frontend/streaming/html_stream_spec.js b/spec/frontend/streaming/html_stream_spec.js new file mode 100644 index 00000000000000..148acc135d7f41 --- /dev/null +++ b/spec/frontend/streaming/html_stream_spec.js @@ -0,0 +1,32 @@ +import { HtmlStream } from '~/streaming/html_stream'; + +describe('HtmlStream', () => { + let write; + let close; + let streamingElement; + + beforeEach(() => { + write = jest.fn(); + close = jest.fn(); + jest.spyOn(Document.prototype, 'write').mockImplementation(write); + jest.spyOn(Document.prototype, 'close').mockImplementation(close); + jest.spyOn(Document.prototype, 'querySelector').mockImplementation(() => { + streamingElement = document.createElement('div'); + return streamingElement; + }); + }); + + it('attaches to original document', () => { + // eslint-disable-next-line no-new + new HtmlStream(document.body); + expect(document.body.contains(streamingElement)).toBe(true); + }); + + it('can write to a document', () => { + const htmlStream = new HtmlStream(document.body); + htmlStream.write('foo'); + htmlStream.close(); + expect(write.mock.calls).toEqual([[''], ['foo'], ['']]); + expect(close).toHaveBeenCalledTimes(1); + }); +}); diff --git a/spec/frontend/streaming/render_balancer_spec.js b/spec/frontend/streaming/render_balancer_spec.js new file mode 100644 index 00000000000000..6a0fba5a216456 --- /dev/null +++ b/spec/frontend/streaming/render_balancer_spec.js @@ -0,0 +1,71 @@ +import { RenderBalancer } from '~/streaming/render_balancer'; + +const HIGH_FRAME_TIME = 100; +const LOW_FRAME_TIME = 10; + +describe('renderBalancer', () => { + let frameTime = 0; + let frameTimeIncrease = 0; + let frameTimeDecrease = 0; + let decrease; + let increase; + const createBalancer = () => { + decrease = jest.fn(); + increase = jest.fn(); + return new RenderBalancer({ + highFrameTime: HIGH_FRAME_TIME, + lowFrameTime: LOW_FRAME_TIME, + increase, + decrease, + }); + }; + const renderTimes = (times) => { + const balancer = createBalancer(); + return new Promise((resolve) => { + let counter = 0; + balancer.render(() => { + if (counter === times) { + resolve(counter); + return true; + } + counter += 1; + return false; + }); + }); + }; + + beforeEach(() => { + jest.spyOn(window, 'requestAnimationFrame').mockImplementation((cb) => { + frameTime += frameTimeIncrease; + frameTime -= frameTimeDecrease; + cb(frameTime); + }); + }); + + afterEach(() => { + window.requestAnimationFrame.mockRestore(); + frameTime = 0; + frameTimeIncrease = 0; + frameTimeDecrease = 0; + }); + + it('renders in a loop', async () => { + const count = await renderTimes(5); + expect(count).toBe(5); + }); + + it('calls decrease', async () => { + frameTimeIncrease = 200; + await renderTimes(5); + expect(decrease).toHaveBeenCalled(); + expect(increase).not.toHaveBeenCalled(); + }); + + it('calls increase', async () => { + frameTimeIncrease = 201; + frameTimeDecrease = 200; + await renderTimes(5); + expect(increase).toHaveBeenCalled(); + expect(decrease).not.toHaveBeenCalled(); + }); +}); diff --git a/spec/frontend/streaming/render_html_streams_spec.js b/spec/frontend/streaming/render_html_streams_spec.js index 56b616732fba94..61894a8455204a 100644 --- a/spec/frontend/streaming/render_html_streams_spec.js +++ b/spec/frontend/streaming/render_html_streams_spec.js @@ -4,6 +4,14 @@ import { HtmlStream } from 
'~/streaming/html_stream'; import waitForPromises from 'helpers/wait_for_promises'; jest.mock('~/streaming/html_stream'); +jest.mock('~/streaming/constants', () => { + return { + HIGH_FRAME_TIME: 0, + LOW_FRAME_TIME: 0, + MAX_CHUNK_SIZE: 1, + MIN_CHUNK_SIZE: 1, + }; +}); const firstStreamContent = 'foobar'; const secondStreamContent = 'bazqux'; diff --git a/spec/frontend/test_setup.js b/spec/frontend/test_setup.js index 2771e8673171ff..3fb226e5ed3cd1 100644 --- a/spec/frontend/test_setup.js +++ b/spec/frontend/test_setup.js @@ -1,12 +1,8 @@ /* Setup for unit test environment */ // eslint-disable-next-line no-restricted-syntax import { setImmediate } from 'timers'; -import { ReadableStream, WritableStream } from 'node:stream/web'; import 'helpers/shared_test_setup'; -global.ReadableStream = ReadableStream; -global.WritableStream = WritableStream; - afterEach(() => // give Promises a bit more time so they fail the right test // eslint-disable-next-line no-restricted-syntax -- GitLab From fdb7b39959da7dff6305829b9792b6752287cf58 Mon Sep 17 00:00:00 2001 From: Stanislav Lashmanov Date: Wed, 1 Feb 2023 02:19:22 +0400 Subject: [PATCH 07/19] Optimize Blame full page view loading Serve 200 lines on page load, then chunks of 10000 lines per page Make one page request immediately, fetch the rest after bundle starts Adjust streaming constants to have a smoother page load --- .../javascripts/blame/streaming/index.js | 15 ++++++-- .../pages/projects/blame/show/index.js | 2 +- .../javascripts/streaming/chunk_writer.js | 4 +-- app/assets/javascripts/streaming/constants.js | 9 +++-- app/services/projects/blame_service.rb | 21 ++++++++--- app/views/projects/blame/show.html.haml | 36 ++++++++----------- 6 files changed, 52 insertions(+), 35 deletions(-) diff --git a/app/assets/javascripts/blame/streaming/index.js b/app/assets/javascripts/blame/streaming/index.js index b2f23eae9b4d23..c725580f944dea 100644 --- a/app/assets/javascripts/blame/streaming/index.js +++ b/app/assets/javascripts/blame/streaming/index.js @@ -4,15 +4,24 @@ import { createAlert } from '~/flash'; import { __ } from '~/locale'; import Sentry from '~/sentry'; -export async function renderBlamePageStreams(streamPromises) { +export async function renderBlamePageStreams(firstStreamPromise) { const element = document.querySelector('#blame-stream-container'); - if (!element || !streamPromises) return; + if (!element || !firstStreamPromise) return; const stopAnchorObserver = handleStreamedAnchorLink(element); + const { dataset } = document.querySelector('#blob-content-holder'); + const totalExtraPages = parseInt(dataset.totalExtraPages, 10); + + const remainingStreams = Array.from({ length: totalExtraPages }, (v, i) => { + const url = new URL(window.location); + url.pathname = url.pathname.replace('-/blame/', '-/blame_page/'); + url.searchParams.set('page', i + 3); + return fetch(url).then((response) => response.body); + }); try { - await renderHtmlStreams(streamPromises, element); + await renderHtmlStreams([firstStreamPromise, ...remainingStreams], element); } catch (error) { createAlert({ message: __('Failed to fully load the Blame page. 
Try to reload the page.') }); Sentry.captureException(error); diff --git a/app/assets/javascripts/pages/projects/blame/show/index.js b/app/assets/javascripts/pages/projects/blame/show/index.js index d45144ba84aa90..ac538a066f4d3d 100644 --- a/app/assets/javascripts/pages/projects/blame/show/index.js +++ b/app/assets/javascripts/pages/projects/blame/show/index.js @@ -3,5 +3,5 @@ import redirectToCorrectPage from '~/blame/blame_redirect'; import { renderBlamePageStreams } from '~/blame/streaming'; redirectToCorrectPage(); -renderBlamePageStreams(window.blamePageStreams); +renderBlamePageStreams(window.blamePageStream); initBlob(); diff --git a/app/assets/javascripts/streaming/chunk_writer.js b/app/assets/javascripts/streaming/chunk_writer.js index 40753f05651d77..66811aa02ac3fb 100644 --- a/app/assets/javascripts/streaming/chunk_writer.js +++ b/app/assets/javascripts/streaming/chunk_writer.js @@ -23,10 +23,10 @@ export const createChunkWriter = (write) => { lowFrameTime: LOW_FRAME_TIME, highFrameTime: HIGH_FRAME_TIME, decrease() { - size = Math.max(size / BALANCE_RATE, MIN_CHUNK_SIZE); + size = Math.round(Math.max(size / BALANCE_RATE, MIN_CHUNK_SIZE)); }, increase() { - size = Math.min(size * BALANCE_RATE, MAX_CHUNK_SIZE); + size = Math.round(Math.min(size * BALANCE_RATE, MAX_CHUNK_SIZE)); }, }); diff --git a/app/assets/javascripts/streaming/constants.js b/app/assets/javascripts/streaming/constants.js index fef5dc602ee87f..1b2334cf385f9d 100644 --- a/app/assets/javascripts/streaming/constants.js +++ b/app/assets/javascripts/streaming/constants.js @@ -1,5 +1,8 @@ +// Lower min chunk numbers can make the page loading incredibly long export const MIN_CHUNK_SIZE = 128 * 1024; -export const MAX_CHUNK_SIZE = 1024 * 1024; +export const MAX_CHUNK_SIZE = 2048 * 1024; export const LOW_FRAME_TIME = 32; -export const HIGH_FRAME_TIME = 48; -export const BALANCE_RATE = 1.5; +// Tasks that take more than 50ms are considered Long +// https://web.dev/optimize-long-tasks/ +export const HIGH_FRAME_TIME = 64; +export const BALANCE_RATE = 1.2; diff --git a/app/services/projects/blame_service.rb b/app/services/projects/blame_service.rb index 12e8f80acbbac2..d482bad6275983 100644 --- a/app/services/projects/blame_service.rb +++ b/app/services/projects/blame_service.rb @@ -5,7 +5,8 @@ module Projects class BlameService PER_PAGE = 1000 - PER_PAGE_STREAMING = 2000 + STREAMING_FIRST_PAGE_SIZE = 200 + STREAMING_PER_PAGE = 10000 def initialize(blob, commit, params) @blob = blob @@ -30,13 +31,16 @@ def pagination end def per_page - return PER_PAGE_STREAMING if @streaming_enabled + return STREAMING_PER_PAGE if @streaming_enabled PER_PAGE end def total_extra_pages - (blob_lines_count / per_page).ceil + total = blob_lines_count + total = blob_lines_count - STREAMING_FIRST_PAGE_SIZE + per_page if @streaming_enabled + + (total / per_page).ceil end def streaming_possible @@ -54,9 +58,16 @@ def streaming_state(params) attr_reader :blob, :commit, :pagination_enabled def blame_range - return unless pagination_enabled + return unless pagination_enabled || @streaming_enabled first_line = (page - 1) * per_page + 1 + + if @streaming_enabled + return 1..STREAMING_FIRST_PAGE_SIZE if page == 1 + + first_line = STREAMING_FIRST_PAGE_SIZE + (page - 2) * per_page + 1 + end + last_line = (first_line + per_page).to_i - 1 first_line..last_line @@ -77,7 +88,7 @@ def pagination_state(params) end def overlimit?(page) - page * per_page >= blob_lines_count + per_page + page > total_extra_pages end def blob_lines_count diff --git 
a/app/views/projects/blame/show.html.haml b/app/views/projects/blame/show.html.haml index 598a164a687f0b..31ed5154780c22 100644 --- a/app/views/projects/blame/show.html.haml +++ b/app/views/projects/blame/show.html.haml @@ -1,19 +1,17 @@ - page_title _("Blame"), @blob.path, @ref - add_page_specific_style 'page_bundles/tree' - dataset = { testid: 'blob-content-holder', qa_selector: 'blame_file_content', per_page: @blame_per_page } -- if @streaming_enabled +- if @streaming_enabled && total_extra_pages > 0 - content_for :startup_js do = javascript_tag do :plain - window.blamePageStreams = Array.from( - { length: #{total_extra_pages} }, - (v, i) => { - const url = new URL(window.location); - url.pathname = url.pathname.replace('-/blame/', '-/blame_page/'); - url.searchParams.set('page', i + 2); - return fetch(url).then(response => response.body); - }, - ); + window.blamePageStream = (() => { + const url = new URL(window.location); + url.pathname = url.pathname.replace('-/blame/', '-/blame_page/'); + url.searchParams.set('page', 2); + return fetch(url).then(response => response.body); + })(); +- dataset = { testid: 'blob-content-holder', qa_selector: 'blame_file_content', per_page: @blame_per_page, total_extra_pages: total_extra_pages - 1 } #blob-content-holder.tree-holder.js-per-page{ data: dataset } = render "projects/blob/breadcrumb", blob: @blob, blame: true @@ -41,23 +39,19 @@ - if @streaming_enabled #blame-stream-container.blame-stream-container - #blame-stream-loading.blame-stream-loading - .gradient - %span.gl-mx-2 - = _('Loading blame file...') - = gl_loading_icon(size: 'md') - - - if @blame_pagination && @blame_pagination.total_pages > 1 - .gl-display-flex.gl-justify-content-center.gl-flex-direction-column.gl-align-items-center.gl-p-3.gl-bg-gray-50.gl-border-t-solid.gl-border-t-1.gl-border-gray-100 - = _('For faster browsing, not all history is shown.') - = render Pajamas::ButtonComponent.new(href: namespace_project_blame_path(namespace_id: @project.namespace, project_id: @project, id: @id, no_pagination: true), size: :small, button_options: { class: 'gl-mt-3' }) do |c| - = _('View entire blame') - if @blame_pagination && @blame_pagination.total_pages > 1 .gl-display-flex.gl-justify-content-center.gl-flex-direction-column.gl-align-items-center.gl-p-3.gl-bg-gray-50.gl-border-t-solid.gl-border-t-1.gl-border-gray-100 = render Pajamas::ButtonComponent.new(href: @entire_blame_path, size: :small, button_options: { class: 'gl-mt-3' }) do |c| = _('View entire blame') + - if @streaming_enabled + #blame-stream-loading.blame-stream-loading + .gradient + %span.gl-mx-2 + = _('Loading blame file...') + = gl_loading_icon(size: 'md') + - if @blame_pagination = paginate(@blame_pagination, theme: "gitlab") -- GitLab From 64837d1942d28ec7ddbac50f19278fc0317819e8 Mon Sep 17 00:00:00 2001 From: Stanislav Lashmanov Date: Fri, 3 Feb 2023 04:29:39 +0400 Subject: [PATCH 08/19] Improve tests Provide blame pages url to frontend --- .../javascripts/blame/streaming/index.js | 7 +- .../javascripts/streaming/chunk_writer.js | 24 +++- app/assets/javascripts/streaming/constants.js | 2 +- .../javascripts/streaming/render_balancer.js | 5 +- .../streaming/render_html_streams.js | 32 +++-- app/controllers/projects/blame_controller.rb | 5 + app/views/projects/blame/show.html.haml | 5 +- spec/features/projects/blobs/blame_spec.rb | 42 ++++-- spec/frontend/blame/streaming/index_spec.js | 96 ++++++++++++++ spec/frontend/streaming/chunk_writer_spec.js | 124 ++++++++++++++---- .../handle_streamed_anchor_link_spec.js | 3 + 
.../streaming/render_balancer_spec.js | 16 +-- .../streaming/render_html_streams_spec.js | 14 ++ 13 files changed, 303 insertions(+), 72 deletions(-) create mode 100644 spec/frontend/blame/streaming/index_spec.js diff --git a/app/assets/javascripts/blame/streaming/index.js b/app/assets/javascripts/blame/streaming/index.js index c725580f944dea..647621a211e9f8 100644 --- a/app/assets/javascripts/blame/streaming/index.js +++ b/app/assets/javascripts/blame/streaming/index.js @@ -2,7 +2,6 @@ import { renderHtmlStreams } from '~/streaming/render_html_streams'; import { handleStreamedAnchorLink } from '~/streaming/handle_streamed_anchor_link'; import { createAlert } from '~/flash'; import { __ } from '~/locale'; -import Sentry from '~/sentry'; export async function renderBlamePageStreams(firstStreamPromise) { const element = document.querySelector('#blame-stream-container'); @@ -12,10 +11,10 @@ export async function renderBlamePageStreams(firstStreamPromise) { const stopAnchorObserver = handleStreamedAnchorLink(element); const { dataset } = document.querySelector('#blob-content-holder'); const totalExtraPages = parseInt(dataset.totalExtraPages, 10); + const { pagesUrl } = dataset; const remainingStreams = Array.from({ length: totalExtraPages }, (v, i) => { - const url = new URL(window.location); - url.pathname = url.pathname.replace('-/blame/', '-/blame_page/'); + const url = new URL(pagesUrl); url.searchParams.set('page', i + 3); return fetch(url).then((response) => response.body); }); @@ -24,7 +23,7 @@ export async function renderBlamePageStreams(firstStreamPromise) { await renderHtmlStreams([firstStreamPromise, ...remainingStreams], element); } catch (error) { createAlert({ message: __('Failed to fully load the Blame page. Try to reload the page.') }); - Sentry.captureException(error); + throw error; } finally { stopAnchorObserver(); document.querySelector('#blame-stream-loading').remove(); diff --git a/app/assets/javascripts/streaming/chunk_writer.js b/app/assets/javascripts/streaming/chunk_writer.js index 66811aa02ac3fb..f80fa2ff6a135a 100644 --- a/app/assets/javascripts/streaming/chunk_writer.js +++ b/app/assets/javascripts/streaming/chunk_writer.js @@ -7,6 +7,14 @@ import { MIN_CHUNK_SIZE, } from '~/streaming/constants'; +const defaultConfig = { + balanceRate: BALANCE_RATE, + minChunkSize: MIN_CHUNK_SIZE, + maxChunkSize: MAX_CHUNK_SIZE, + lowFrameTime: LOW_FRAME_TIME, + highFrameTime: HIGH_FRAME_TIME, +}; + function concatUint8Arrays(a, b) { const array = new Uint8Array(a.length + b.length); array.set(a, 0); @@ -14,19 +22,23 @@ function concatUint8Arrays(a, b) { return array; } -export const createChunkWriter = (write) => { +export const createChunkWriter = (write, config) => { + const { balanceRate, minChunkSize, maxChunkSize, lowFrameTime, highFrameTime } = { + ...defaultConfig, + ...config, + }; const decoder = new TextDecoder('utf-8'); - let size = MIN_CHUNK_SIZE; + let size = minChunkSize; const balancer = new RenderBalancer({ - lowFrameTime: LOW_FRAME_TIME, - highFrameTime: HIGH_FRAME_TIME, + lowFrameTime, + highFrameTime, decrease() { - size = Math.round(Math.max(size / BALANCE_RATE, MIN_CHUNK_SIZE)); + size = Math.round(Math.max(size / balanceRate, minChunkSize)); }, increase() { - size = Math.round(Math.min(size * BALANCE_RATE, MAX_CHUNK_SIZE)); + size = Math.round(Math.min(size * balanceRate, maxChunkSize)); }, }); diff --git a/app/assets/javascripts/streaming/constants.js b/app/assets/javascripts/streaming/constants.js index 1b2334cf385f9d..d1b736ea4a6123 100644 --- 
a/app/assets/javascripts/streaming/constants.js +++ b/app/assets/javascripts/streaming/constants.js @@ -1,4 +1,4 @@ -// Lower min chunk numbers can make the page loading incredibly long +// Lower min chunk numbers can make the page loading take incredibly long export const MIN_CHUNK_SIZE = 128 * 1024; export const MAX_CHUNK_SIZE = 2048 * 1024; export const LOW_FRAME_TIME = 32; diff --git a/app/assets/javascripts/streaming/render_balancer.js b/app/assets/javascripts/streaming/render_balancer.js index 8a862f219e7160..223c3b3e10b4ab 100644 --- a/app/assets/javascripts/streaming/render_balancer.js +++ b/app/assets/javascripts/streaming/render_balancer.js @@ -11,9 +11,8 @@ export class RenderBalancer { render(fn) { return new Promise((resolve) => { const callback = (timestamp) => { - const stop = fn(); this.#throttle(timestamp); - if (!stop) requestAnimationFrame(callback); + if (!fn()) requestAnimationFrame(callback); else resolve(); }; requestAnimationFrame(callback); @@ -23,7 +22,7 @@ export class RenderBalancer { #throttle(timestamp) { const { previousTimestamp } = this; this.previousTimestamp = timestamp; - if (!previousTimestamp) return; + if (previousTimestamp === undefined) return; const duration = Math.round(timestamp - previousTimestamp); if (!duration) return; diff --git a/app/assets/javascripts/streaming/render_html_streams.js b/app/assets/javascripts/streaming/render_html_streams.js index cdf03934958ef5..cd0bac203df223 100644 --- a/app/assets/javascripts/streaming/render_html_streams.js +++ b/app/assets/javascripts/streaming/render_html_streams.js @@ -1,14 +1,28 @@ import { HtmlStream } from '~/streaming/html_stream'; import { createChunkWriter } from '~/streaming/chunk_writer'; -export function renderHtmlStreams(streamPromises, element) { +async function pipeStreams(domWriter, streamPromises) { + try { + for await (const stream of streamPromises.slice(0, -1)) { + await stream.pipeTo(domWriter, { preventClose: true }); + } + const stream = await streamPromises[streamPromises.length - 1]; + await stream.pipeTo(domWriter); + } catch (error) { + domWriter.abort(error); + } +} + +export function renderHtmlStreams(streamPromises, element, config) { if (streamPromises.length === 0) return Promise.resolve(); const htmlStream = new HtmlStream(element); - const [handleChunk, finalizeRendering] = createChunkWriter((chunk) => htmlStream.write(chunk)); + const [handleChunk, finalizeRendering] = createChunkWriter( + (chunk) => htmlStream.write(chunk), + config, + ); - // eslint-disable-next-line no-async-promise-executor - return new Promise(async (resolve, reject) => { + return new Promise((resolve, reject) => { const domWriter = new WritableStream({ write(chunk) { return handleChunk(chunk); @@ -24,14 +38,6 @@ export function renderHtmlStreams(streamPromises, element) { }, }); - try { - for await (const stream of streamPromises.slice(0, -1)) { - await stream.pipeTo(domWriter, { preventClose: true }); - } - const stream = await streamPromises[streamPromises.length - 1]; - await stream.pipeTo(domWriter); - } catch (error) { - domWriter.abort(error); - } + pipeStreams(domWriter, streamPromises); }); } diff --git a/app/controllers/projects/blame_controller.rb b/app/controllers/projects/blame_controller.rb index dad3602388bde2..ae810305d1cdaf 100644 --- a/app/controllers/projects/blame_controller.rb +++ b/app/controllers/projects/blame_controller.rb @@ -29,6 +29,7 @@ def show @blame = Gitlab::View::Presenter::Factory.new(blame_service.blame, project: @project, path: @path, page: 
blame_service.page).fabricate! @entire_blame_path = full_blame_path(no_pagination: true) + @blame_pages_url = blame_pages_url(permitted_params) if blame_service.streaming_possible @entire_blame_path = full_blame_path(streaming: true) end @@ -60,6 +61,10 @@ def page def full_blame_path(params) namespace_project_blame_path(namespace_id: @project.namespace, project_id: @project, id: @id, **params) end + + def blame_pages_url(params) + namespace_project_blame_page_url(namespace_id: @project.namespace, project_id: @project, id: @id, **params) + end end Projects::BlameController.prepend_mod diff --git a/app/views/projects/blame/show.html.haml b/app/views/projects/blame/show.html.haml index 31ed5154780c22..2261f36454b435 100644 --- a/app/views/projects/blame/show.html.haml +++ b/app/views/projects/blame/show.html.haml @@ -6,12 +6,11 @@ = javascript_tag do :plain window.blamePageStream = (() => { - const url = new URL(window.location); - url.pathname = url.pathname.replace('-/blame/', '-/blame_page/'); + const url = new URL("#{@blame_pages_url}"); url.searchParams.set('page', 2); return fetch(url).then(response => response.body); })(); -- dataset = { testid: 'blob-content-holder', qa_selector: 'blame_file_content', per_page: @blame_per_page, total_extra_pages: total_extra_pages - 1 } +- dataset = { testid: 'blob-content-holder', qa_selector: 'blame_file_content', per_page: @blame_per_page, total_extra_pages: total_extra_pages - 1, pages_url: @blame_pages_url } #blob-content-holder.tree-holder.js-per-page{ data: dataset } = render "projects/blob/breadcrumb", blob: @blob, blame: true diff --git a/spec/features/projects/blobs/blame_spec.rb b/spec/features/projects/blobs/blame_spec.rb index 27b7c6ef2d52e9..7bba94038d6d42 100644 --- a/spec/features/projects/blobs/blame_spec.rb +++ b/spec/features/projects/blobs/blame_spec.rb @@ -85,19 +85,43 @@ def visit_blob_blame(path) end end - context 'when user clicks on View entire blame button' do + shared_examples 'a full blame page' do + context 'when user clicks on View entire blame button' do + before do + visit_blob_blame(path) + click_link _('View entire blame') + end + + it 'displays the blame page without pagination' do + within '[data-testid="blob-content-holder"]' do + expect(page).to have_css('#L1') + expect(page).to have_css('#L667') + expect(page).not_to have_css('.gl-pagination') + end + end + end + end + + context 'when streaming is disabled' do before do - visit_blob_blame(path) + stub_feature_flags(blame_page_streaming: false) end - it 'displays the blame page without pagination' do - within '[data-testid="blob-content-holder"]' do - click_link _('View entire blame') + it_behaves_like 'a full blame page' + end - expect(page).to have_css('#L1') - expect(page).to have_css('#L3') - expect(page).not_to have_css('.gl-pagination') - end + context 'when streaming is enabled' do + before do + stub_const('Projects::BlameService::STREAMING_PER_PAGE', 50) + stub_feature_flags(blame_page_streaming: true) + end + + it_behaves_like 'a full blame page' + + it 'shows loading text' do + visit_blob_blame(path) + click_link _('View entire blame') + expect(page).to have_text('Loading blame file...') end end diff --git a/spec/frontend/blame/streaming/index_spec.js b/spec/frontend/blame/streaming/index_spec.js new file mode 100644 index 00000000000000..f781becf0fefbc --- /dev/null +++ b/spec/frontend/blame/streaming/index_spec.js @@ -0,0 +1,96 @@ +import waitForPromises from 'helpers/wait_for_promises'; +import { renderBlamePageStreams } from '~/blame/streaming'; 
+import { setHTMLFixture } from 'helpers/fixtures'; +import { renderHtmlStreams } from '~/streaming/render_html_streams'; +import { handleStreamedAnchorLink } from '~/streaming/handle_streamed_anchor_link'; +import { createAlert } from '~/flash'; + +jest.mock('~/streaming/render_html_streams'); +jest.mock('~/streaming/handle_streamed_anchor_link'); +jest.mock('~/sentry'); +jest.mock('~/flash'); + +global.fetch = jest.fn(); + +describe('renderBlamePageStreams', () => { + let stopAnchor; + const PAGES_URL = 'https://example.com/'; + const findStreamContainer = () => document.querySelector('#blame-stream-container'); + const findStreamLoadingIndicator = () => document.querySelector('#blame-stream-loading'); + + const setupHtml = (totalExtraPages = 0) => { + setHTMLFixture(` +
+
+
+ `); + }; + + handleStreamedAnchorLink.mockImplementation(() => stopAnchor); + + beforeEach(() => { + stopAnchor = jest.fn(); + fetch.mockClear(); + }); + + it('does nothing for an empty page', async () => { + await renderBlamePageStreams(); + + expect(handleStreamedAnchorLink).not.toHaveBeenCalled(); + expect(renderHtmlStreams).not.toHaveBeenCalled(); + }); + + it('renders a single stream', async () => { + let res; + const stream = new Promise((resolve) => { + res = resolve; + }); + renderHtmlStreams.mockImplementationOnce(() => stream); + setupHtml(); + + renderBlamePageStreams(stream); + + expect(handleStreamedAnchorLink).toHaveBeenCalledTimes(1); + expect(stopAnchor).toHaveBeenCalledTimes(0); + expect(renderHtmlStreams).toHaveBeenCalledWith([stream], findStreamContainer()); + expect(findStreamLoadingIndicator()).not.toBe(null); + + res(); + await waitForPromises(); + + expect(stopAnchor).toHaveBeenCalledTimes(1); + expect(findStreamLoadingIndicator()).toBe(null); + }); + + it('renders rest of the streams', async () => { + const stream = Promise.resolve(); + const stream2 = Promise.resolve({ body: null }); + fetch.mockImplementationOnce(() => stream2); + setupHtml(1); + + await renderBlamePageStreams(stream); + + expect(fetch.mock.calls[0][0].toString()).toBe(`${PAGES_URL}?page=3`); + expect(renderHtmlStreams).toHaveBeenCalledWith([stream, stream2], findStreamContainer()); + }); + + it('shows an error message when failed', async () => { + const stream = Promise.resolve(); + const error = new Error(); + renderHtmlStreams.mockImplementationOnce(() => Promise.reject(error)); + setupHtml(); + + try { + await renderBlamePageStreams(stream); + } catch (err) { + expect(err).toBe(error); + } + + expect(createAlert).toHaveBeenCalledWith({ + message: 'Failed to fully load the Blame page. 
Try to reload the page.', + }); + }); +}); diff --git a/spec/frontend/streaming/chunk_writer_spec.js b/spec/frontend/streaming/chunk_writer_spec.js index bc4ba1021ffce2..845cc1b87a03de 100644 --- a/spec/frontend/streaming/chunk_writer_spec.js +++ b/spec/frontend/streaming/chunk_writer_spec.js @@ -1,17 +1,12 @@ import { createChunkWriter } from '~/streaming/chunk_writer'; -jest.mock('~/streaming/constants', () => { - return { - HIGH_FRAME_TIME: 0, - LOW_FRAME_TIME: 0, - MAX_CHUNK_SIZE: 1, - MIN_CHUNK_SIZE: 1, - }; -}); - describe('createChunkWriter', () => { let accumulator = ''; let write; + let config; + let frameTime; + let frameIndex; + let frameTimeDeltas; const createChunk = (text) => { const encoder = new TextEncoder(); @@ -22,31 +17,112 @@ describe('createChunkWriter', () => { write = jest.fn((part) => { accumulator += part; }); - return createChunkWriter(write); + return createChunkWriter(write, config); + }; + + const pushChunks = (...chunks) => { + const [writeChunk, finalize] = createWriter(); + chunks.forEach((chunk) => { + writeChunk(createChunk(chunk)); + }); + finalize(); }; beforeEach(() => { - jest.spyOn(window, 'requestAnimationFrame').mockImplementation((cb) => cb(0)); + accumulator = ''; + config = undefined; + frameTime = 0; + frameIndex = 0; + frameTimeDeltas = []; + jest.spyOn(window, 'requestAnimationFrame').mockImplementation((cb) => { + cb(frameTime); + frameTime += frameTimeDeltas[frameIndex] || 0; + frameIndex += 1; + }); }); afterEach(() => { window.requestAnimationFrame.mockRestore(); - accumulator = ''; + jest.resetAllMocks(); }); - it('calls write for the split chunks', () => { - const text = 'foobar'; - const [writeChunk] = createWriter(); - writeChunk(createChunk(text)); - expect(accumulator).toBe(text); - expect(write).toHaveBeenCalledTimes(6); + describe('when chunk length must be "1"', () => { + beforeEach(() => { + config = { minChunkSize: 1, maxChunkSize: 1 }; + }); + + it('splits big chunks into smaller ones', () => { + const text = 'foobar'; + pushChunks(text); + expect(accumulator).toBe(text); + expect(write).toHaveBeenCalledTimes(text.length); + }); + + it('handles small emoji chunks', () => { + const text = 'fooπŸ‘€barπŸ‘¨β€πŸ‘©β€πŸ‘§bazπŸ‘§πŸ‘§πŸ»πŸ‘§πŸΌπŸ‘§πŸ½πŸ‘§πŸΎπŸ‘§πŸΏ'; + pushChunks(text); + expect(accumulator).toBe(text); + expect(write).toHaveBeenCalledTimes(createChunk(text).length); + }); }); - it('handles emoji chunks', () => { - const text = 'fooπŸ‘€barπŸ‘¨β€πŸ‘©β€πŸ‘§bazπŸ‘§πŸ‘§πŸ»πŸ‘§πŸΌπŸ‘§πŸ½πŸ‘§πŸΎπŸ‘§πŸΏ'; - const [writeChunk] = createWriter(); - writeChunk(createChunk(text)); - expect(accumulator).toBe(text); - expect(write).toHaveBeenCalledTimes(75); + describe('when chunk length must not be less than "10"', () => { + beforeEach(() => { + config = { minChunkSize: 5 }; + }); + + it('joins small chunks', () => { + const text = '12345'; + pushChunks(...text.split('')); + expect(accumulator).toBe(text); + expect(write).toHaveBeenCalledTimes(1); + }); + + it('handles overflow with small chunks', () => { + const text = '123456'; + pushChunks(...text.split('')); + expect(accumulator).toBe(text); + expect(write).toHaveBeenCalledTimes(2); + }); + }); + + describe('when frame time exceeds low limit', () => { + beforeEach(() => { + frameTimeDeltas = [10]; + config = { + minChunkSize: 1, + maxChunkSize: 5, + balanceRate: 10, + lowFrameTime: 100, + }; + }); + + it('increases chunk size', () => { + const text = '1222223'; + pushChunks(...text.split('')); + expect(accumulator).toBe(text); + 
expect(write.mock.calls).toMatchObject([['1'], ['22222'], ['3']]); + }); + }); + + describe('when frame time exceeds high limit', () => { + beforeEach(() => { + // using 0 to accumulate and wait for write + frameTimeDeltas = [5, 0, 5, 0, 30, 0, 30, 0]; + config = { + minChunkSize: 1, + maxChunkSize: 3, + balanceRate: 2, + lowFrameTime: 10, + highFrameTime: 20, + }; + }); + + it('decreases chunk size', () => { + const text = '122333221'; + pushChunks(...text.split('')); + expect(accumulator).toBe(text); + expect(write.mock.calls).toMatchObject([['1'], ['22'], ['333'], ['22'], ['1']]); + }); }); }); diff --git a/spec/frontend/streaming/handle_streamed_anchor_link_spec.js b/spec/frontend/streaming/handle_streamed_anchor_link_spec.js index b051edad361125..aa6dd0c6ffa5cf 100644 --- a/spec/frontend/streaming/handle_streamed_anchor_link_spec.js +++ b/spec/frontend/streaming/handle_streamed_anchor_link_spec.js @@ -2,8 +2,10 @@ import { resetHTMLFixture, setHTMLFixture } from 'helpers/fixtures'; import waitForPromises from 'helpers/wait_for_promises'; import { handleStreamedAnchorLink } from '~/streaming/handle_streamed_anchor_link'; import { scrollToElement } from '~/lib/utils/common_utils'; +import LineHighlighter from '~/blob/line_highlighter'; jest.mock('~/lib/utils/common_utils'); +jest.mock('~/blob/line_highlighter'); describe('handleStreamedAnchorLink', () => { const anchorName = 'foo'; @@ -50,6 +52,7 @@ describe('handleStreamedAnchorLink', () => { insertElement(); await waitForPromises(); expect(scrollToElement).toHaveBeenCalledTimes(1); + expect(LineHighlighter).toHaveBeenCalledTimes(1); }); it("doesn't scroll to the anchor when destroyed", async () => { diff --git a/spec/frontend/streaming/render_balancer_spec.js b/spec/frontend/streaming/render_balancer_spec.js index 6a0fba5a216456..aa960bc2014e42 100644 --- a/spec/frontend/streaming/render_balancer_spec.js +++ b/spec/frontend/streaming/render_balancer_spec.js @@ -5,10 +5,10 @@ const LOW_FRAME_TIME = 10; describe('renderBalancer', () => { let frameTime = 0; - let frameTimeIncrease = 0; - let frameTimeDecrease = 0; + let frameTimeDelta = 0; let decrease; let increase; + const createBalancer = () => { decrease = jest.fn(); increase = jest.fn(); @@ -19,6 +19,7 @@ describe('renderBalancer', () => { decrease, }); }; + const renderTimes = (times) => { const balancer = createBalancer(); return new Promise((resolve) => { @@ -36,8 +37,7 @@ describe('renderBalancer', () => { beforeEach(() => { jest.spyOn(window, 'requestAnimationFrame').mockImplementation((cb) => { - frameTime += frameTimeIncrease; - frameTime -= frameTimeDecrease; + frameTime += frameTimeDelta; cb(frameTime); }); }); @@ -45,8 +45,7 @@ describe('renderBalancer', () => { afterEach(() => { window.requestAnimationFrame.mockRestore(); frameTime = 0; - frameTimeIncrease = 0; - frameTimeDecrease = 0; + frameTimeDelta = 0; }); it('renders in a loop', async () => { @@ -55,15 +54,14 @@ describe('renderBalancer', () => { }); it('calls decrease', async () => { - frameTimeIncrease = 200; + frameTimeDelta = 200; await renderTimes(5); expect(decrease).toHaveBeenCalled(); expect(increase).not.toHaveBeenCalled(); }); it('calls increase', async () => { - frameTimeIncrease = 201; - frameTimeDecrease = 200; + frameTimeDelta = 1; await renderTimes(5); expect(increase).toHaveBeenCalled(); expect(decrease).not.toHaveBeenCalled(); diff --git a/spec/frontend/streaming/render_html_streams_spec.js b/spec/frontend/streaming/render_html_streams_spec.js index 61894a8455204a..0a460945045889 100644 --- 
a/spec/frontend/streaming/render_html_streams_spec.js +++ b/spec/frontend/streaming/render_html_streams_spec.js @@ -84,4 +84,18 @@ describe('renderHtmlStreams', () => { expect(htmlAccumulator).toContain(firstStreamContent + secondStreamContent); expect(closeSpy).toHaveBeenCalledTimes(1); }); + + it('closes HtmlStream on error', async () => { + const closeSpy = jest.spyOn(HtmlStream.prototype, 'close'); + const stream1 = createSingleChunkStream(firstStreamContent); + const error = new Error(); + + try { + await renderHtmlStreams([Promise.resolve(stream1), Promise.reject(error)], document.body); + } catch (err) { + expect(err).toBe(error); + } + + expect(closeSpy).toHaveBeenCalledTimes(1); + }); }); -- GitLab From d2069e45eb3d6b8ecf8b53fe767f84195495ff08 Mon Sep 17 00:00:00 2001 From: Stanislav Lashmanov Date: Thu, 9 Feb 2023 01:14:23 +0400 Subject: [PATCH 09/19] Provide rendering error fallback Refactor blame page streaming initialization Use streaming: false for the last decoded chunk Use adoptNode for the external DOM node Refactor to use public methods instead of private Add polyfills for Web Streams API --- app/assets/javascripts/blame/blame_redirect.js | 1 - app/assets/javascripts/blame/streaming/index.js | 12 +++++++++++- .../javascripts/pages/projects/blame/show/index.js | 7 +++++-- app/assets/javascripts/streaming/chunk_writer.js | 6 ++++-- .../streaming/handle_streamed_anchor_link.js | 6 +++--- app/assets/javascripts/streaming/html_stream.js | 2 +- app/assets/javascripts/streaming/render_balancer.js | 4 ++-- .../javascripts/streaming/render_html_streams.js | 7 +++++-- locale/gitlab.pot | 5 ++++- package.json | 2 ++ spec/frontend/blame/streaming/index_spec.js | 6 +++++- yarn.lock | 5 +++++ 12 files changed, 47 insertions(+), 16 deletions(-) diff --git a/app/assets/javascripts/blame/blame_redirect.js b/app/assets/javascripts/blame/blame_redirect.js index f63bbca2dae502..155e2a3a2cdc49 100644 --- a/app/assets/javascripts/blame/blame_redirect.js +++ b/app/assets/javascripts/blame/blame_redirect.js @@ -3,7 +3,6 @@ import { createAlert } from '~/flash'; import { __ } from '~/locale'; export default function redirectToCorrectBlamePage() { - if (new URLSearchParams(window.location.search).get('streaming')) return; const { hash } = window.location; const linesPerPage = parseInt(document.querySelector('.js-per-page').dataset.perPage, 10); const params = new URLSearchParams(window.location.search); diff --git a/app/assets/javascripts/blame/streaming/index.js b/app/assets/javascripts/blame/streaming/index.js index 647621a211e9f8..2f8c77df29d392 100644 --- a/app/assets/javascripts/blame/streaming/index.js +++ b/app/assets/javascripts/blame/streaming/index.js @@ -22,7 +22,17 @@ export async function renderBlamePageStreams(firstStreamPromise) { try { await renderHtmlStreams([firstStreamPromise, ...remainingStreams], element); } catch (error) { - createAlert({ message: __('Failed to fully load the Blame page. 
Try to reload the page.') }); + createAlert({ + message: __('Failed to fully load the blame file.'), + primaryButton: { + text: __('Show paginated blame file'), + clickHandler() { + const newUrl = new URL(window.location); + newUrl.searchParams.delete('streaming'); + window.location.href = newUrl; + }, + }, + }); throw error; } finally { stopAnchorObserver(); diff --git a/app/assets/javascripts/pages/projects/blame/show/index.js b/app/assets/javascripts/pages/projects/blame/show/index.js index ac538a066f4d3d..f0fdd18c828dad 100644 --- a/app/assets/javascripts/pages/projects/blame/show/index.js +++ b/app/assets/javascripts/pages/projects/blame/show/index.js @@ -2,6 +2,9 @@ import initBlob from '~/pages/projects/init_blob'; import redirectToCorrectPage from '~/blame/blame_redirect'; import { renderBlamePageStreams } from '~/blame/streaming'; -redirectToCorrectPage(); -renderBlamePageStreams(window.blamePageStream); +if (new URLSearchParams(window.location.search).get('streaming')) { + renderBlamePageStreams(window.blamePageStream); +} else { + redirectToCorrectPage(); +} initBlob(); diff --git a/app/assets/javascripts/streaming/chunk_writer.js b/app/assets/javascripts/streaming/chunk_writer.js index f80fa2ff6a135a..f19be904998028 100644 --- a/app/assets/javascripts/streaming/chunk_writer.js +++ b/app/assets/javascripts/streaming/chunk_writer.js @@ -29,7 +29,8 @@ export const createChunkWriter = (write, config) => { }; const decoder = new TextDecoder('utf-8'); - let size = minChunkSize; + const averageSize = Math.round((maxChunkSize - minChunkSize + 1) / 2); + let size = Math.max(averageSize, minChunkSize); const balancer = new RenderBalancer({ lowFrameTime, @@ -75,7 +76,8 @@ export const createChunkWriter = (write, config) => { const finalizeRendering = () => { if (!accumulator) return; - write(decoder.decode(accumulator, { stream: true })); + // last chunk should have stream: false to indicate the end of stream + write(decoder.decode(accumulator, { stream: false })); }; return [handleChunk, finalizeRendering]; diff --git a/app/assets/javascripts/streaming/handle_streamed_anchor_link.js b/app/assets/javascripts/streaming/handle_streamed_anchor_link.js index 182c466eedd92e..4644a09ce200af 100644 --- a/app/assets/javascripts/streaming/handle_streamed_anchor_link.js +++ b/app/assets/javascripts/streaming/handle_streamed_anchor_link.js @@ -5,11 +5,11 @@ import LineHighlighter from '~/blob/line_highlighter'; const noop = () => {}; export function handleStreamedAnchorLink(rootElement) { - const anchor = window.location.hash; - if (!anchor || anchor === '#' || document.querySelector(anchor)) return noop; + const anchor = window.location.hash.substring(1); + if (!anchor || document.getElementById(anchor)) return noop; const handler = throttle((mutationList, instance) => { - const target = document.querySelector(anchor); + const target = document.getElementById(anchor); if (!target) return; scrollToElement(target); // eslint-disable-next-line no-new diff --git a/app/assets/javascripts/streaming/html_stream.js b/app/assets/javascripts/streaming/html_stream.js index d3d7ab4651d0f4..e2643d585b7b46 100644 --- a/app/assets/javascripts/streaming/html_stream.js +++ b/app/assets/javascripts/streaming/html_stream.js @@ -6,7 +6,7 @@ export class HtmlStream { streamDocument.write(''); const virtualStreamingElement = streamDocument.querySelector('streaming-element'); - element.appendChild(virtualStreamingElement); + element.appendChild(document.adoptNode(virtualStreamingElement)); this.streamDocument = 
streamDocument; } diff --git a/app/assets/javascripts/streaming/render_balancer.js b/app/assets/javascripts/streaming/render_balancer.js index 223c3b3e10b4ab..245f79733fb56f 100644 --- a/app/assets/javascripts/streaming/render_balancer.js +++ b/app/assets/javascripts/streaming/render_balancer.js @@ -11,7 +11,7 @@ export class RenderBalancer { render(fn) { return new Promise((resolve) => { const callback = (timestamp) => { - this.#throttle(timestamp); + this.throttle(timestamp); if (!fn()) requestAnimationFrame(callback); else resolve(); }; @@ -19,7 +19,7 @@ export class RenderBalancer { }); } - #throttle(timestamp) { + throttle(timestamp) { const { previousTimestamp } = this; this.previousTimestamp = timestamp; if (previousTimestamp === undefined) return; diff --git a/app/assets/javascripts/streaming/render_html_streams.js b/app/assets/javascripts/streaming/render_html_streams.js index cd0bac203df223..3e3dd063953e45 100644 --- a/app/assets/javascripts/streaming/render_html_streams.js +++ b/app/assets/javascripts/streaming/render_html_streams.js @@ -1,13 +1,16 @@ +import { ReadableStream as PolyfillReadableStream } from 'web-streams-polyfill/es6'; +import { createReadableStreamWrapper } from '@mattiasbuelens/web-streams-adapter'; import { HtmlStream } from '~/streaming/html_stream'; import { createChunkWriter } from '~/streaming/chunk_writer'; async function pipeStreams(domWriter, streamPromises) { + const toPolyfillReadable = createReadableStreamWrapper(PolyfillReadableStream); try { for await (const stream of streamPromises.slice(0, -1)) { - await stream.pipeTo(domWriter, { preventClose: true }); + await toPolyfillReadable(stream).pipeTo(domWriter, { preventClose: true }); } const stream = await streamPromises[streamPromises.length - 1]; - await stream.pipeTo(domWriter); + await toPolyfillReadable(stream).pipeTo(domWriter); } catch (error) { domWriter.abort(error); } diff --git a/locale/gitlab.pot b/locale/gitlab.pot index b317d772a47b0f..72420ec3d0aefa 100644 --- a/locale/gitlab.pot +++ b/locale/gitlab.pot @@ -17280,7 +17280,7 @@ msgstr "" msgid "Failed to find users for %{missing}" msgstr "" -msgid "Failed to fully load the Blame page. Try to reload the page." +msgid "Failed to fully load the blame file." msgstr "" msgid "Failed to generate export, please try again later." @@ -40160,6 +40160,9 @@ msgstr "" msgid "Show open epics" msgstr "" +msgid "Show paginated blame file" +msgstr "" + msgid "Show project milestones" msgstr "" diff --git a/package.json b/package.json index 2f6732ccc0b812..646362a49feaf2 100644 --- a/package.json +++ b/package.json @@ -59,6 +59,7 @@ "@gitlab/ui": "56.2.0", "@gitlab/visual-review-tools": "1.7.3", "@gitlab/web-ide": "0.0.1-dev-20230223005157", + "@mattiasbuelens/web-streams-adapter": "^0.1.0", "@rails/actioncable": "6.1.4-7", "@rails/ujs": "6.1.4-7", "@sourcegraph/code-host-integration": "0.0.84", @@ -197,6 +198,7 @@ "vue-virtual-scroll-list": "^1.4.7", "vuedraggable": "^2.23.0", "vuex": "^3.6.2", + "web-streams-polyfill": "^3.2.1", "web-vitals": "^0.2.4", "webpack": "^4.46.0", "webpack-bundle-analyzer": "^4.6.1", diff --git a/spec/frontend/blame/streaming/index_spec.js b/spec/frontend/blame/streaming/index_spec.js index f781becf0fefbc..3a815b0be6af2d 100644 --- a/spec/frontend/blame/streaming/index_spec.js +++ b/spec/frontend/blame/streaming/index_spec.js @@ -90,7 +90,11 @@ describe('renderBlamePageStreams', () => { } expect(createAlert).toHaveBeenCalledWith({ - message: 'Failed to fully load the Blame page. 
Try to reload the page.', + message: 'Failed to fully load the blame file.', + primaryButton: { + text: 'Show paginated blame file', + clickHandler: expect.any(Function), + }, }); }); }); diff --git a/yarn.lock b/yarn.lock index 80d9713aa2f43b..9279d49611fbdc 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1832,6 +1832,11 @@ resolved "https://registry.yarnpkg.com/@linaria/core/-/core-3.0.0-beta.13.tgz#049c5be5faa67e341e413a0f6b641d5d78d91056" integrity sha512-3zEi5plBCOsEzUneRVuQb+2SAx3qaC1dj0FfFAI6zIJQoDWu0dlSwKijMRack7oO9tUWrchfj3OkKQAd1LBdVg== +"@mattiasbuelens/web-streams-adapter@^0.1.0": + version "0.1.0" + resolved "https://registry.yarnpkg.com/@mattiasbuelens/web-streams-adapter/-/web-streams-adapter-0.1.0.tgz#607b5a25682f4ae2741da7ba6df39302505336b3" + integrity sha512-oV4PyZfwJNtmFWhvlJLqYIX1Nn22ML8FZpS16ZUKv0hg7414xV1fjsGqxQzLT2dyK92TKxsJSwMOd7VNHAtPmA== + "@miragejs/pretender-node-polyfill@^0.1.0": version "0.1.2" resolved "https://registry.yarnpkg.com/@miragejs/pretender-node-polyfill/-/pretender-node-polyfill-0.1.2.tgz#d26b6b7483fb70cd62189d05c95d2f67153e43f2" -- GitLab From 0c4dc174cf928609e787492b355936ec4b2a3a11 Mon Sep 17 00:00:00 2001 From: Stanislav Lashmanov Date: Sun, 12 Feb 2023 01:02:13 +0400 Subject: [PATCH 10/19] Refactor HtmlStream and ChunkWriter to use Decorator pattern --- .../javascripts/streaming/chunk_writer.js | 121 +++++++++++------- .../javascripts/streaming/html_stream.js | 10 ++ .../javascripts/streaming/render_balancer.js | 2 +- .../streaming/render_html_streams.js | 14 +- spec/frontend/streaming/chunk_writer_spec.js | 120 +++++++++++------ spec/frontend/streaming/html_stream_spec.js | 14 ++ .../streaming/render_balancer_spec.js | 4 +- .../streaming/render_html_streams_spec.js | 57 ++++----- 8 files changed, 213 insertions(+), 129 deletions(-) diff --git a/app/assets/javascripts/streaming/chunk_writer.js b/app/assets/javascripts/streaming/chunk_writer.js index f19be904998028..9571b4665f821f 100644 --- a/app/assets/javascripts/streaming/chunk_writer.js +++ b/app/assets/javascripts/streaming/chunk_writer.js @@ -22,63 +22,88 @@ function concatUint8Arrays(a, b) { return array; } -export const createChunkWriter = (write, config) => { - const { balanceRate, minChunkSize, maxChunkSize, lowFrameTime, highFrameTime } = { - ...defaultConfig, - ...config, - }; - const decoder = new TextDecoder('utf-8'); - - const averageSize = Math.round((maxChunkSize - minChunkSize + 1) / 2); - let size = Math.max(averageSize, minChunkSize); - - const balancer = new RenderBalancer({ - lowFrameTime, - highFrameTime, - decrease() { - size = Math.round(Math.max(size / balanceRate, minChunkSize)); - }, - increase() { - size = Math.round(Math.min(size * balanceRate, maxChunkSize)); - }, - }); - - let accumulator = null; - - const handleChunk = (chunk) => { - if (accumulator) { - accumulator = concatUint8Arrays(accumulator, chunk); +// This class is used to write chunks with a balanced size +// to avoid blocking main thread for too long. +// It handles both too small and too big chunks to fit into a desired size. +// The size of the chunk is determined by RenderBalancer, +// It measures execution time for each chunk write and adjusts next chunk size. 
+export class ChunkWriter { + accumulator = null; + decoder = new TextDecoder('utf-8'); + + constructor(htmlStream, config) { + this.htmlStream = htmlStream; + + const { balanceRate, minChunkSize, maxChunkSize, lowFrameTime, highFrameTime } = { + ...defaultConfig, + ...config, + }; + + const averageSize = Math.round((maxChunkSize - minChunkSize + 1) / 2); + this.size = Math.max(averageSize, minChunkSize); + + this.balancer = new RenderBalancer({ + lowFrameTime, + highFrameTime, + decrease: () => { + this.size = Math.round(Math.max(this.size / balanceRate, minChunkSize)); + }, + increase: () => { + this.size = Math.round(Math.min(this.size * balanceRate, maxChunkSize)); + }, + }); + } + + write(chunk) { + if (this.accumulator) { + this.accumulator = concatUint8Arrays(this.accumulator, chunk); } else { - accumulator = chunk; + this.accumulator = chunk; } - if (size > accumulator.length) return Promise.resolve(); + // accumulate chunks until the size is fulfilled + if (this.size > this.accumulator.length) return Promise.resolve(); + return this.balancedWrite(); + } + + balancedWrite() { let cursor = 0; - return balancer.render(() => { - const chunkPart = accumulator.subarray(cursor, cursor + size); - if (chunkPart.length < size) { - accumulator = chunkPart; - return true; + + return this.balancer.render(() => { + const chunkPart = this.accumulator.subarray(cursor, cursor + this.size); + // accumulate chunks until the size is fulfilled + // this is a hot path for the last chunkPart of the chunk + if (chunkPart.length < this.size) { + this.accumulator = chunkPart; + return false; } - const decoded = decoder.decode(chunkPart, { stream: true }); - write(decoded); + // stream: true allows us to split chunks with multi-part words + const decoded = this.decoder.decode(chunkPart, { stream: true }); + this.htmlStream.write(decoded); - cursor += size; - if (cursor >= accumulator.length) { - accumulator = null; - return true; + cursor += this.size; + if (cursor >= this.accumulator.length) { + this.accumulator = null; + return false; } - return false; + // continue render + return true; }); - }; + } - const finalizeRendering = () => { - if (!accumulator) return; - // last chunk should have stream: false to indicate the end of stream - write(decoder.decode(accumulator, { stream: false })); - }; + close() { + if (this.accumulator) { + // last chunk should have stream: false to indicate the end of the stream + this.htmlStream.write(this.decoder.decode(this.accumulator, { stream: false })); + this.accumulator = null; + } + this.htmlStream.close(); + } - return [handleChunk, finalizeRendering]; -}; + abort() { + this.accumulator = null; + this.htmlStream.abort(); + } +} diff --git a/app/assets/javascripts/streaming/html_stream.js b/app/assets/javascripts/streaming/html_stream.js index e2643d585b7b46..8182f69a60783d 100644 --- a/app/assets/javascripts/streaming/html_stream.js +++ b/app/assets/javascripts/streaming/html_stream.js @@ -1,3 +1,5 @@ +import { ChunkWriter } from '~/streaming/chunk_writer'; + export class HtmlStream { constructor(element) { const streamDocument = document.implementation.createHTMLDocument('stream'); @@ -11,6 +13,10 @@ export class HtmlStream { this.streamDocument = streamDocument; } + withChunkWriter(config) { + return new ChunkWriter(this, config); + } + write(chunk) { // eslint-disable-next-line no-unsanitized/method this.streamDocument.write(chunk); @@ -20,4 +26,8 @@ export class HtmlStream { this.streamDocument.write(''); this.streamDocument.close(); } + + abort() { + 
this.streamDocument.close(); + } } diff --git a/app/assets/javascripts/streaming/render_balancer.js b/app/assets/javascripts/streaming/render_balancer.js index 245f79733fb56f..a1222879bf8989 100644 --- a/app/assets/javascripts/streaming/render_balancer.js +++ b/app/assets/javascripts/streaming/render_balancer.js @@ -12,7 +12,7 @@ export class RenderBalancer { return new Promise((resolve) => { const callback = (timestamp) => { this.throttle(timestamp); - if (!fn()) requestAnimationFrame(callback); + if (fn()) requestAnimationFrame(callback); else resolve(); }; requestAnimationFrame(callback); diff --git a/app/assets/javascripts/streaming/render_html_streams.js b/app/assets/javascripts/streaming/render_html_streams.js index 3e3dd063953e45..551e3ef1138aee 100644 --- a/app/assets/javascripts/streaming/render_html_streams.js +++ b/app/assets/javascripts/streaming/render_html_streams.js @@ -1,7 +1,6 @@ import { ReadableStream as PolyfillReadableStream } from 'web-streams-polyfill/es6'; import { createReadableStreamWrapper } from '@mattiasbuelens/web-streams-adapter'; import { HtmlStream } from '~/streaming/html_stream'; -import { createChunkWriter } from '~/streaming/chunk_writer'; async function pipeStreams(domWriter, streamPromises) { const toPolyfillReadable = createReadableStreamWrapper(PolyfillReadableStream); @@ -19,24 +18,19 @@ async function pipeStreams(domWriter, streamPromises) { export function renderHtmlStreams(streamPromises, element, config) { if (streamPromises.length === 0) return Promise.resolve(); - const htmlStream = new HtmlStream(element); - const [handleChunk, finalizeRendering] = createChunkWriter( - (chunk) => htmlStream.write(chunk), - config, - ); + const chunkedHtmlStream = new HtmlStream(element).withChunkWriter(config); return new Promise((resolve, reject) => { const domWriter = new WritableStream({ write(chunk) { - return handleChunk(chunk); + return chunkedHtmlStream.write(chunk); }, close() { - finalizeRendering(); - htmlStream.close(); + chunkedHtmlStream.close(); resolve(); }, abort(error) { - htmlStream.close(); + chunkedHtmlStream.abort(); reject(error); }, }); diff --git a/spec/frontend/streaming/chunk_writer_spec.js b/spec/frontend/streaming/chunk_writer_spec.js index 845cc1b87a03de..d1ab9763bce18b 100644 --- a/spec/frontend/streaming/chunk_writer_spec.js +++ b/spec/frontend/streaming/chunk_writer_spec.js @@ -1,49 +1,63 @@ -import { createChunkWriter } from '~/streaming/chunk_writer'; +import { ChunkWriter } from '~/streaming/chunk_writer'; +import { RenderBalancer } from '~/streaming/render_balancer'; + +jest.mock('~/streaming/render_balancer'); describe('createChunkWriter', () => { let accumulator = ''; let write; + let close; + let abort; let config; - let frameTime; - let frameIndex; - let frameTimeDeltas; + let render; + let increase; + let decrease; const createChunk = (text) => { const encoder = new TextEncoder(); return encoder.encode(text); }; - const createWriter = () => { + const createHtmlStream = () => { write = jest.fn((part) => { accumulator += part; }); - return createChunkWriter(write, config); + close = jest.fn(); + abort = jest.fn(); + return { + write, + close, + abort, + }; + }; + + const createWriter = () => { + return new ChunkWriter(createHtmlStream(), config); }; const pushChunks = (...chunks) => { - const [writeChunk, finalize] = createWriter(); + const writer = createWriter(); chunks.forEach((chunk) => { - writeChunk(createChunk(chunk)); + writer.write(createChunk(chunk)); }); - finalize(); + writer.close(); }; beforeEach(() => { 
accumulator = ''; config = undefined; - frameTime = 0; - frameIndex = 0; - frameTimeDeltas = []; - jest.spyOn(window, 'requestAnimationFrame').mockImplementation((cb) => { - cb(frameTime); - frameTime += frameTimeDeltas[frameIndex] || 0; - frameIndex += 1; + render = jest.fn((cb) => { + while (cb()) { + // render until 'false' + } + }); + RenderBalancer.mockImplementation(({ increase: inc, decrease: dec }) => { + decrease = jest.fn(dec); + increase = jest.fn(inc); + return { + render, + }; }); - }); - - afterEach(() => { - window.requestAnimationFrame.mockRestore(); - jest.resetAllMocks(); }); describe('when chunk length must be "1"', () => { @@ -66,9 +80,9 @@ describe('createChunkWriter', () => { }); }); - describe('when chunk length must not be less than "10"', () => { + describe('when chunk length must not be lower than "5" and exceed "10"', () => { beforeEach(() => { - config = { minChunkSize: 5 }; + config = { minChunkSize: 5, maxChunkSize: 10 }; }); it('joins small chunks', () => { @@ -76,53 +90,85 @@ describe('createChunkWriter', () => { pushChunks(...text.split('')); expect(accumulator).toBe(text); expect(write).toHaveBeenCalledTimes(1); + expect(close).toHaveBeenCalledTimes(1); }); it('handles overflow with small chunks', () => { - const text = '123456'; + const text = '123456789'; pushChunks(...text.split('')); expect(accumulator).toBe(text); expect(write).toHaveBeenCalledTimes(2); + expect(close).toHaveBeenCalledTimes(1); }); }); describe('when frame time exceeds low limit', () => { beforeEach(() => { - frameTimeDeltas = [10]; config = { minChunkSize: 1, maxChunkSize: 5, balanceRate: 10, - lowFrameTime: 100, }; }); it('increases chunk size', () => { - const text = '1222223'; - pushChunks(...text.split('')); + const text = '111222223'; + const writer = createWriter(); + const chunk = createChunk(text); + + // size: 3 + writer.write(chunk.subarray(0, 3)); + increase(); + + // size: 5 (max) + writer.write(chunk.subarray(3)); + writer.close(); + expect(accumulator).toBe(text); - expect(write.mock.calls).toMatchObject([['1'], ['22222'], ['3']]); + expect(write.mock.calls).toMatchObject([['111'], ['22222'], ['3']]); + expect(close).toHaveBeenCalledTimes(1); }); }); describe('when frame time exceeds high limit', () => { beforeEach(() => { - // using 0 to accumulate and wait for write - frameTimeDeltas = [5, 0, 5, 0, 30, 0, 30, 0]; config = { minChunkSize: 1, - maxChunkSize: 3, + maxChunkSize: 10, balanceRate: 2, - lowFrameTime: 10, - highFrameTime: 20, }; }); it('decreases chunk size', () => { - const text = '122333221'; - pushChunks(...text.split('')); + const text = '111112223345'; + const writer = createWriter(); + const chunk = createChunk(text); + + // size: 5 + writer.write(chunk.subarray(0, 5)); + decrease(); + + // size: 3 + writer.write(chunk.subarray(5, 8)); + decrease(); + + // size: 2 + writer.write(chunk.subarray(8, 10)); + decrease(); + + // size: 1 + writer.write(chunk.subarray(10)); + writer.close(); + expect(accumulator).toBe(text); - expect(write.mock.calls).toMatchObject([['1'], ['22'], ['333'], ['22'], ['1']]); + expect(write.mock.calls).toMatchObject([['11111'], ['222'], ['33'], ['4'], ['5']]); + expect(close).toHaveBeenCalledTimes(1); }); }); + + it('calls abort on htmlStream', () => { + const writer = createWriter(); + writer.abort(); + expect(abort).toHaveBeenCalledTimes(1); + }); }); diff --git a/spec/frontend/streaming/html_stream_spec.js b/spec/frontend/streaming/html_stream_spec.js index 148acc135d7f41..115a9ddc803a2c 100644 --- 
a/spec/frontend/streaming/html_stream_spec.js +++ b/spec/frontend/streaming/html_stream_spec.js @@ -1,4 +1,7 @@ import { HtmlStream } from '~/streaming/html_stream'; +import { ChunkWriter } from '~/streaming/chunk_writer'; + +jest.mock('~/streaming/chunk_writer'); describe('HtmlStream', () => { let write; @@ -29,4 +32,15 @@ describe('HtmlStream', () => { expect(write.mock.calls).toEqual([[''], ['foo'], ['']]); expect(close).toHaveBeenCalledTimes(1); }); + + it('returns chunked writer', () => { + const htmlStream = new HtmlStream(document.body).withChunkWriter(); + expect(htmlStream).toBeInstanceOf(ChunkWriter); + }); + + it('closes on abort', () => { + const htmlStream = new HtmlStream(document.body); + htmlStream.abort(); + expect(close).toHaveBeenCalled(); + }); }); diff --git a/spec/frontend/streaming/render_balancer_spec.js b/spec/frontend/streaming/render_balancer_spec.js index aa960bc2014e42..dae0c98d678d88 100644 --- a/spec/frontend/streaming/render_balancer_spec.js +++ b/spec/frontend/streaming/render_balancer_spec.js @@ -27,10 +27,10 @@ describe('renderBalancer', () => { balancer.render(() => { if (counter === times) { resolve(counter); - return true; + return false; } counter += 1; - return false; + return true; }); }); }; diff --git a/spec/frontend/streaming/render_html_streams_spec.js b/spec/frontend/streaming/render_html_streams_spec.js index 0a460945045889..55cef0ea4693d4 100644 --- a/spec/frontend/streaming/render_html_streams_spec.js +++ b/spec/frontend/streaming/render_html_streams_spec.js @@ -17,54 +17,50 @@ const firstStreamContent = 'foobar'; const secondStreamContent = 'bazqux'; describe('renderHtmlStreams', () => { - let htmlAccumulator = ''; + let htmlWriter; const encoder = new TextEncoder(); const createSingleChunkStream = (chunk) => { - return new ReadableStream({ + const encoded = encoder.encode(chunk); + const stream = new ReadableStream({ pull(controller) { - controller.enqueue(encoder.encode(chunk)); + controller.enqueue(encoded); controller.close(); }, }); + return [stream, encoded]; }; beforeEach(() => { - jest.spyOn(window, 'requestAnimationFrame').mockImplementation((cb) => cb(0)); - jest.spyOn(HtmlStream.prototype, 'write').mockImplementation((chunk) => { - htmlAccumulator += chunk; - }); - }); - - afterEach(() => { - htmlAccumulator = ''; - window.requestAnimationFrame.mockRestore(); + htmlWriter = { + write: jest.fn(), + close: jest.fn(), + abort: jest.fn(), + }; + jest.spyOn(HtmlStream.prototype, 'withChunkWriter').mockReturnValue(htmlWriter); }); it('renders a single stream', async () => { - const closeSpy = jest.spyOn(HtmlStream.prototype, 'close'); - const stream = createSingleChunkStream(firstStreamContent); + const [stream, encoded] = createSingleChunkStream(firstStreamContent); await renderHtmlStreams([Promise.resolve(stream)], document.body); - expect(htmlAccumulator).toContain(firstStreamContent); - expect(closeSpy).toHaveBeenCalledTimes(1); + expect(htmlWriter.write).toHaveBeenCalledWith(encoded); + expect(htmlWriter.close).toHaveBeenCalledTimes(1); }); it('renders stream sequence', async () => { - const closeSpy = jest.spyOn(HtmlStream.prototype, 'close'); - const stream1 = createSingleChunkStream(firstStreamContent); - const stream2 = createSingleChunkStream(secondStreamContent); + const [stream1, encoded1] = createSingleChunkStream(firstStreamContent); + const [stream2, encoded2] = createSingleChunkStream(secondStreamContent); await renderHtmlStreams([Promise.resolve(stream1), Promise.resolve(stream2)], document.body); - 
expect(htmlAccumulator).toContain(firstStreamContent + secondStreamContent); - expect(closeSpy).toHaveBeenCalledTimes(1); + expect(htmlWriter.write.mock.calls).toMatchObject([[encoded1], [encoded2]]); + expect(htmlWriter.close).toHaveBeenCalledTimes(1); }); it("doesn't wait for the whole sequence to resolve before streaming", async () => { - const closeSpy = jest.spyOn(HtmlStream.prototype, 'close'); - const stream1 = createSingleChunkStream(firstStreamContent); - const stream2 = createSingleChunkStream(secondStreamContent); + const [stream1, encoded1] = createSingleChunkStream(firstStreamContent); + const [stream2, encoded2] = createSingleChunkStream(secondStreamContent); let res; const delayedStream = new Promise((resolve) => { @@ -75,19 +71,18 @@ describe('renderHtmlStreams', () => { await waitForPromises(); - expect(htmlAccumulator).toContain(firstStreamContent); - expect(closeSpy).toHaveBeenCalledTimes(0); + expect(htmlWriter.write.mock.calls).toMatchObject([[encoded1]]); + expect(htmlWriter.close).toHaveBeenCalledTimes(0); res(stream2); await waitForPromises(); - expect(htmlAccumulator).toContain(firstStreamContent + secondStreamContent); - expect(closeSpy).toHaveBeenCalledTimes(1); + expect(htmlWriter.write.mock.calls).toMatchObject([[encoded1], [encoded2]]); + expect(htmlWriter.close).toHaveBeenCalledTimes(1); }); it('closes HtmlStream on error', async () => { - const closeSpy = jest.spyOn(HtmlStream.prototype, 'close'); - const stream1 = createSingleChunkStream(firstStreamContent); + const [stream1] = createSingleChunkStream(firstStreamContent); const error = new Error(); try { @@ -96,6 +91,6 @@ describe('renderHtmlStreams', () => { expect(err).toBe(error); } - expect(closeSpy).toHaveBeenCalledTimes(1); + expect(htmlWriter.abort).toHaveBeenCalledTimes(1); }); }); -- GitLab From 4e1137fb2b278311b6358e798c39a383a1758dba Mon Sep 17 00:00:00 2001 From: Stanislav Lashmanov Date: Sun, 12 Feb 2023 03:45:11 +0400 Subject: [PATCH 11/19] Handle slow chunk pushes This can be caused by slow network --- .../javascripts/streaming/chunk_writer.js | 71 ++++++++++++++----- app/assets/javascripts/streaming/constants.js | 1 + app/views/projects/blame/show.html.haml | 1 - spec/frontend/__mocks__/lodash/debounce.js | 19 ++++- spec/frontend/__mocks__/lodash/throttle.js | 2 +- spec/frontend/streaming/chunk_writer_spec.js | 26 ++++++- 6 files changed, 96 insertions(+), 24 deletions(-) diff --git a/app/assets/javascripts/streaming/chunk_writer.js b/app/assets/javascripts/streaming/chunk_writer.js index 9571b4665f821f..2ab9e60bd63a91 100644 --- a/app/assets/javascripts/streaming/chunk_writer.js +++ b/app/assets/javascripts/streaming/chunk_writer.js @@ -1,3 +1,4 @@ +import { throttle } from 'lodash'; import { RenderBalancer } from '~/streaming/render_balancer'; import { BALANCE_RATE, @@ -5,6 +6,7 @@ import { LOW_FRAME_TIME, MAX_CHUNK_SIZE, MIN_CHUNK_SIZE, + TIMEOUT, } from '~/streaming/constants'; const defaultConfig = { @@ -13,6 +15,7 @@ const defaultConfig = { maxChunkSize: MAX_CHUNK_SIZE, lowFrameTime: LOW_FRAME_TIME, highFrameTime: HIGH_FRAME_TIME, + timeout: TIMEOUT, }; function concatUint8Arrays(a, b) { @@ -24,21 +27,34 @@ function concatUint8Arrays(a, b) { // This class is used to write chunks with a balanced size // to avoid blocking main thread for too long. -// It handles both too small and too big chunks to fit into a desired size. +// +// A chunk can be: +// 1. Too small +// 2. Too large +// 3. Delayed in time +// +// This class resolves all these problems by +// 1. 
Splitting or concatenating chunks to met the size criteria +// 2. Rendering current chunk buffer immediately if enough time has passed +// // The size of the chunk is determined by RenderBalancer, // It measures execution time for each chunk write and adjusts next chunk size. export class ChunkWriter { - accumulator = null; + buffer = null; decoder = new TextDecoder('utf-8'); + timeout = null; constructor(htmlStream, config) { this.htmlStream = htmlStream; - const { balanceRate, minChunkSize, maxChunkSize, lowFrameTime, highFrameTime } = { + const { balanceRate, minChunkSize, maxChunkSize, lowFrameTime, highFrameTime, timeout } = { ...defaultConfig, ...config, }; + // ensure we still render chunks over time if the size criteria is not met + this.scheduleAccumulatorFlush = throttle(this.flushAccumulator.bind(this), timeout); + const averageSize = Math.round((maxChunkSize - minChunkSize + 1) / 2); this.size = Math.max(averageSize, minChunkSize); @@ -55,14 +71,19 @@ export class ChunkWriter { } write(chunk) { - if (this.accumulator) { - this.accumulator = concatUint8Arrays(this.accumulator, chunk); + this.scheduleAccumulatorFlush.cancel(); + + if (this.buffer) { + this.buffer = concatUint8Arrays(this.buffer, chunk); } else { - this.accumulator = chunk; + this.buffer = chunk; } // accumulate chunks until the size is fulfilled - if (this.size > this.accumulator.length) return Promise.resolve(); + if (this.size > this.buffer.length) { + this.scheduleAccumulatorFlush(); + return Promise.resolve(); + } return this.balancedWrite(); } @@ -71,21 +92,20 @@ export class ChunkWriter { let cursor = 0; return this.balancer.render(() => { - const chunkPart = this.accumulator.subarray(cursor, cursor + this.size); + const chunkPart = this.buffer.subarray(cursor, cursor + this.size); // accumulate chunks until the size is fulfilled // this is a hot path for the last chunkPart of the chunk if (chunkPart.length < this.size) { - this.accumulator = chunkPart; + this.buffer = chunkPart; + this.scheduleAccumulatorFlush(); return false; } - // stream: true allows us to split chunks with multi-part words - const decoded = this.decoder.decode(chunkPart, { stream: true }); - this.htmlStream.write(decoded); + this.writeToDom(chunkPart); cursor += this.size; - if (cursor >= this.accumulator.length) { - this.accumulator = null; + if (cursor >= this.buffer.length) { + this.buffer = null; return false; } // continue render @@ -93,17 +113,32 @@ export class ChunkWriter { }); } + writeToDom(chunk, stream = true) { + // stream: true allows us to split chunks with multi-part words + const decoded = this.decoder.decode(chunk, { stream }); + this.htmlStream.write(decoded); + } + + flushAccumulator() { + if (this.buffer) { + this.writeToDom(this.buffer); + this.buffer = null; + } + } + close() { - if (this.accumulator) { + this.scheduleAccumulatorFlush.cancel(); + if (this.buffer) { // last chunk should have stream: false to indicate the end of the stream - this.htmlStream.write(this.decoder.decode(this.accumulator, { stream: false })); - this.accumulator = null; + this.writeToDom(this.buffer, false); + this.buffer = null; } this.htmlStream.close(); } abort() { - this.accumulator = null; + this.scheduleAccumulatorFlush.cancel(); + this.buffer = null; this.htmlStream.abort(); } } diff --git a/app/assets/javascripts/streaming/constants.js b/app/assets/javascripts/streaming/constants.js index d1b736ea4a6123..224d93a7ac1863 100644 --- a/app/assets/javascripts/streaming/constants.js +++ b/app/assets/javascripts/streaming/constants.js 
@@ -6,3 +6,4 @@ export const LOW_FRAME_TIME = 32; // https://web.dev/optimize-long-tasks/ export const HIGH_FRAME_TIME = 64; export const BALANCE_RATE = 1.2; +export const TIMEOUT = 500; diff --git a/app/views/projects/blame/show.html.haml b/app/views/projects/blame/show.html.haml index 2261f36454b435..921a3f5a502086 100644 --- a/app/views/projects/blame/show.html.haml +++ b/app/views/projects/blame/show.html.haml @@ -1,6 +1,5 @@ - page_title _("Blame"), @blob.path, @ref - add_page_specific_style 'page_bundles/tree' -- dataset = { testid: 'blob-content-holder', qa_selector: 'blame_file_content', per_page: @blame_per_page } - if @streaming_enabled && total_extra_pages > 0 - content_for :startup_js do = javascript_tag do diff --git a/spec/frontend/__mocks__/lodash/debounce.js b/spec/frontend/__mocks__/lodash/debounce.js index d4fe2ce54063c5..15f806fc31a1c0 100644 --- a/spec/frontend/__mocks__/lodash/debounce.js +++ b/spec/frontend/__mocks__/lodash/debounce.js @@ -9,9 +9,22 @@ // Further reference: https://github.com/facebook/jest/issues/3465 export default (fn) => { - const debouncedFn = jest.fn().mockImplementation(fn); - debouncedFn.cancel = jest.fn(); - debouncedFn.flush = jest.fn().mockImplementation(() => { + let id; + const debouncedFn = jest.fn(function run(...args) { + // this is calculated in runtime so beforeAll hook works in tests + const timeout = global.JEST_DEBOUNCE_THROTTLE_TIMEOUT; + if (timeout) { + id = setTimeout(() => { + fn.apply(this, args); + }, timeout); + } else { + fn.apply(this, args); + } + }); + debouncedFn.cancel = jest.fn(() => { + clearTimeout(id); + }); + debouncedFn.flush = jest.fn(() => { const errorMessage = "The .flush() method returned by lodash.debounce is not yet implemented/mocked by the mock in 'spec/frontend/__mocks__/lodash/debounce.js'."; diff --git a/spec/frontend/__mocks__/lodash/throttle.js b/spec/frontend/__mocks__/lodash/throttle.js index e8a82654c78a70..b1014662918029 100644 --- a/spec/frontend/__mocks__/lodash/throttle.js +++ b/spec/frontend/__mocks__/lodash/throttle.js @@ -1,4 +1,4 @@ // Similar to `lodash/debounce`, `lodash/throttle` also causes flaky specs. // See `./debounce.js` for more details. 
-export default (fn) => fn; +export { default } from './debounce'; diff --git a/spec/frontend/streaming/chunk_writer_spec.js b/spec/frontend/streaming/chunk_writer_spec.js index d1ab9763bce18b..7dd02a2081de69 100644 --- a/spec/frontend/streaming/chunk_writer_spec.js +++ b/spec/frontend/streaming/chunk_writer_spec.js @@ -3,7 +3,7 @@ import { RenderBalancer } from '~/streaming/render_balancer'; jest.mock('~/streaming/render_balancer'); -describe('createChunkWriter', () => { +describe('ChunkWriter', () => { let accumulator = ''; let write; let close; @@ -43,7 +43,12 @@ describe('createChunkWriter', () => { writer.close(); }; + afterAll(() => { + global.JEST_DEBOUNCE_THROTTLE_TIMEOUT = undefined; + }); + beforeEach(() => { + global.JEST_DEBOUNCE_THROTTLE_TIMEOUT = 100; accumulator = ''; config = undefined; render = jest.fn((cb) => { @@ -100,6 +105,25 @@ describe('createChunkWriter', () => { expect(write).toHaveBeenCalledTimes(2); expect(close).toHaveBeenCalledTimes(1); }); + + it('calls flush on small chunks', () => { + global.JEST_DEBOUNCE_THROTTLE_TIMEOUT = undefined; + const flushAccumulator = jest.spyOn(ChunkWriter.prototype, 'flushAccumulator'); + const text = '1'; + pushChunks(text); + expect(accumulator).toBe(text); + expect(flushAccumulator).toHaveBeenCalledTimes(1); + }); + + it('calls flush on large chunks', () => { + const flushAccumulator = jest.spyOn(ChunkWriter.prototype, 'flushAccumulator'); + const text = '1234567890123'; + const writer = createWriter(); + writer.write(createChunk(text)); + jest.runAllTimers(); + expect(accumulator).toBe(text); + expect(flushAccumulator).toHaveBeenCalledTimes(1); + }); }); describe('when frame time exceeds low limit', () => { -- GitLab From 388e448ddf66cb2159ef10099d04f48f9901b140 Mon Sep 17 00:00:00 2001 From: Stanislav Lashmanov Date: Mon, 20 Feb 2023 17:06:23 +0400 Subject: [PATCH 12/19] Adjust blame loading icon position and size --- app/views/projects/blame/show.html.haml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app/views/projects/blame/show.html.haml b/app/views/projects/blame/show.html.haml index 921a3f5a502086..fd341017f6b030 100644 --- a/app/views/projects/blame/show.html.haml +++ b/app/views/projects/blame/show.html.haml @@ -46,9 +46,9 @@ - if @streaming_enabled #blame-stream-loading.blame-stream-loading .gradient + = gl_loading_icon(size: 'sm') %span.gl-mx-2 = _('Loading blame file...') - = gl_loading_icon(size: 'md') - if @blame_pagination = paginate(@blame_pagination, theme: "gitlab") -- GitLab From 7688c0e70d1eb42b5b489c3877cd9cf3809ff6e5 Mon Sep 17 00:00:00 2001 From: Stanislav Lashmanov Date: Mon, 20 Feb 2023 23:39:31 +0400 Subject: [PATCH 13/19] Add rate limiting to stream requests Lower streaming page size from 10000 to 2000 lines per page Move Web Streams polyfill to blame page initializer --- .../javascripts/blame/streaming/index.js | 20 +++-- .../helpers/throttle_promises_helper.js | 50 +++++++++++ app/assets/javascripts/streaming/polyfills.js | 5 ++ .../streaming/rate_limit_stream_requests.js | 84 +++++++++++++++++++ .../streaming/render_html_streams.js | 10 +-- app/services/projects/blame_service.rb | 2 +- spec/frontend/blame/streaming/index_spec.js | 10 +++ 7 files changed, 170 insertions(+), 11 deletions(-) create mode 100644 app/assets/javascripts/helpers/throttle_promises_helper.js create mode 100644 app/assets/javascripts/streaming/polyfills.js create mode 100644 app/assets/javascripts/streaming/rate_limit_stream_requests.js diff --git a/app/assets/javascripts/blame/streaming/index.js 
b/app/assets/javascripts/blame/streaming/index.js index 2f8c77df29d392..436a4daf4948e6 100644 --- a/app/assets/javascripts/blame/streaming/index.js +++ b/app/assets/javascripts/blame/streaming/index.js @@ -2,6 +2,8 @@ import { renderHtmlStreams } from '~/streaming/render_html_streams'; import { handleStreamedAnchorLink } from '~/streaming/handle_streamed_anchor_link'; import { createAlert } from '~/flash'; import { __ } from '~/locale'; +import { rateLimitStreamRequests } from '~/streaming/rate_limit_stream_requests'; +import { toPolyfillReadable } from '~/streaming/polyfills'; export async function renderBlamePageStreams(firstStreamPromise) { const element = document.querySelector('#blame-stream-container'); @@ -13,14 +15,22 @@ export async function renderBlamePageStreams(firstStreamPromise) { const totalExtraPages = parseInt(dataset.totalExtraPages, 10); const { pagesUrl } = dataset; - const remainingStreams = Array.from({ length: totalExtraPages }, (v, i) => { - const url = new URL(pagesUrl); - url.searchParams.set('page', i + 3); - return fetch(url).then((response) => response.body); + const remainingStreams = rateLimitStreamRequests({ + factory: (index) => { + const url = new URL(pagesUrl); + url.searchParams.set('page', index + 3); + return fetch(url).then((response) => toPolyfillReadable(response.body)); + }, + immediateCount: 2, + maxConcurrentRequests: 5, + total: totalExtraPages, }); try { - await renderHtmlStreams([firstStreamPromise, ...remainingStreams], element); + await renderHtmlStreams( + [firstStreamPromise.then(toPolyfillReadable), ...remainingStreams], + element, + ); } catch (error) { createAlert({ message: __('Failed to fully load the blame file.'), diff --git a/app/assets/javascripts/helpers/throttle_promises_helper.js b/app/assets/javascripts/helpers/throttle_promises_helper.js new file mode 100644 index 00000000000000..e0234bbb37f529 --- /dev/null +++ b/app/assets/javascripts/helpers/throttle_promises_helper.js @@ -0,0 +1,50 @@ +const wait = (timeout) => + new Promise((resolve) => { + setTimeout(resolve, timeout); + }); + +export const throttlePromises = ({ + factory, + immediateLength = 1, + length, + maxPromises = length, + timeout, +}) => { + const unsettled = []; + const pushUnsettled = (promise) => { + unsettled.push(promise); + // eslint-disable-next-line promise/catch-or-return + promise.then(() => { + unsettled.splice(unsettled.indexOf(promise), 1); + }); + }; + const immediate = Array.from({ length: immediateLength }, (_, i) => { + const promise = factory(i); + pushUnsettled(promise); + return promise; + }); + const queue = []; + const flushQueue = () => { + const promises = unsettled.length > maxPromises ? 
unsettled : [...unsettled, wait(timeout)]; + // eslint-disable-next-line promise/catch-or-return + Promise.race(promises).then(() => { + const cb = queue.shift(); + cb?.(); + if (queue.length !== 0) { + // wait for callback promise to be removed from unsettled + queueMicrotask(flushQueue); + } + }); + }; + const throttled = Array.from({ length: length - immediateLength }, (_, i) => { + return new Promise((resolve) => { + queue.push(() => { + const promise = factory(i + immediateLength); + pushUnsettled(promise); + resolve(promise); + }); + }); + }); + flushQueue(); + return [...immediate, ...throttled]; +}; diff --git a/app/assets/javascripts/streaming/polyfills.js b/app/assets/javascripts/streaming/polyfills.js new file mode 100644 index 00000000000000..a9a044a3e99c69 --- /dev/null +++ b/app/assets/javascripts/streaming/polyfills.js @@ -0,0 +1,5 @@ +import { createReadableStreamWrapper } from '@mattiasbuelens/web-streams-adapter'; +import { ReadableStream as PolyfillReadableStream } from 'web-streams-polyfill'; + +// TODO: remove this when our WebStreams API reaches 100% support +export const toPolyfillReadable = createReadableStreamWrapper(PolyfillReadableStream); diff --git a/app/assets/javascripts/streaming/rate_limit_stream_requests.js b/app/assets/javascripts/streaming/rate_limit_stream_requests.js new file mode 100644 index 00000000000000..f54745123acf7e --- /dev/null +++ b/app/assets/javascripts/streaming/rate_limit_stream_requests.js @@ -0,0 +1,84 @@ +const consumeReadableStream = (stream) => { + return new Promise((resolve, reject) => { + stream.pipeTo( + new WritableStream({ + close: resolve, + abort: reject, + }), + ); + }); +}; + +const wait = (timeout) => + new Promise((resolve) => { + setTimeout(resolve, timeout); + }); + +// this rate-limiting approach is specific to Web Streams +// because streams only resolve when they're fully consumed +// we need to split each stream into two pieces: +// one for the rate-limiter (wait for all the bytes to be sent) +// another for the original consumer +export const rateLimitStreamRequests = ({ + factory, + total, + immediateCount, + maxConcurrentRequests, + timeout = 0, +}) => { + const unsettled = []; + + const pushUnsettled = (promise) => { + let res; + let rej; + const consume = new Promise((resolve, reject) => { + res = resolve; + rej = reject; + }); + unsettled.push(consume); + return promise.then((stream) => { + const [first, second] = stream.tee(); + // eslint-disable-next-line promise/no-nesting + consumeReadableStream(first) + .then(() => { + unsettled.splice(unsettled.indexOf(consume), 1); + res(); + }) + .catch(rej); + return second; + }, rej); + }; + + const immediate = Array.from({ length: immediateCount }, (_, i) => pushUnsettled(factory(i))); + + const queue = []; + const flushQueue = () => { + const promises = + unsettled.length > maxConcurrentRequests ? 
unsettled : [...unsettled, wait(timeout)]; + Promise.race(promises) + .then(() => { + const cb = queue.shift(); + cb?.(); + if (queue.length !== 0) { + // wait for stream consumer promise to be removed from unsettled + queueMicrotask(flushQueue); + } + }) + // errors are handled by the caller + .catch(() => {}); + }; + + const throttled = Array.from({ length: total - immediateCount }, (_, i) => { + return new Promise((resolve, reject) => { + queue.push(() => { + pushUnsettled(factory(i + immediateCount)) + .then(resolve) + .catch(reject); + }); + }); + }); + + flushQueue(); + + return [...immediate, ...throttled]; +}; diff --git a/app/assets/javascripts/streaming/render_html_streams.js b/app/assets/javascripts/streaming/render_html_streams.js index 551e3ef1138aee..7201e541777c8d 100644 --- a/app/assets/javascripts/streaming/render_html_streams.js +++ b/app/assets/javascripts/streaming/render_html_streams.js @@ -1,20 +1,20 @@ -import { ReadableStream as PolyfillReadableStream } from 'web-streams-polyfill/es6'; -import { createReadableStreamWrapper } from '@mattiasbuelens/web-streams-adapter'; import { HtmlStream } from '~/streaming/html_stream'; async function pipeStreams(domWriter, streamPromises) { - const toPolyfillReadable = createReadableStreamWrapper(PolyfillReadableStream); try { for await (const stream of streamPromises.slice(0, -1)) { - await toPolyfillReadable(stream).pipeTo(domWriter, { preventClose: true }); + await stream.pipeTo(domWriter, { preventClose: true }); } const stream = await streamPromises[streamPromises.length - 1]; - await toPolyfillReadable(stream).pipeTo(domWriter); + await stream.pipeTo(domWriter); } catch (error) { domWriter.abort(error); } } +// this function (and the rest of the pipeline) expects polyfilled streams +// do not pass native streams here unless our browser support allows for it +// TODO: remove this notice when our WebStreams API support reaches 100% export function renderHtmlStreams(streamPromises, element, config) { if (streamPromises.length === 0) return Promise.resolve(); diff --git a/app/services/projects/blame_service.rb b/app/services/projects/blame_service.rb index d482bad6275983..3fdf08ce317cd3 100644 --- a/app/services/projects/blame_service.rb +++ b/app/services/projects/blame_service.rb @@ -6,7 +6,7 @@ module Projects class BlameService PER_PAGE = 1000 STREAMING_FIRST_PAGE_SIZE = 200 - STREAMING_PER_PAGE = 10000 + STREAMING_PER_PAGE = 2000 def initialize(blob, commit, params) @blob = blob diff --git a/spec/frontend/blame/streaming/index_spec.js b/spec/frontend/blame/streaming/index_spec.js index 3a815b0be6af2d..bf6c88358fba8a 100644 --- a/spec/frontend/blame/streaming/index_spec.js +++ b/spec/frontend/blame/streaming/index_spec.js @@ -2,11 +2,15 @@ import waitForPromises from 'helpers/wait_for_promises'; import { renderBlamePageStreams } from '~/blame/streaming'; import { setHTMLFixture } from 'helpers/fixtures'; import { renderHtmlStreams } from '~/streaming/render_html_streams'; +import { rateLimitStreamRequests } from '~/streaming/rate_limit_stream_requests'; import { handleStreamedAnchorLink } from '~/streaming/handle_streamed_anchor_link'; +import { toPolyfillReadable } from '~/streaming/polyfills'; import { createAlert } from '~/flash'; jest.mock('~/streaming/render_html_streams'); +jest.mock('~/streaming/rate_limit_stream_requests'); jest.mock('~/streaming/handle_streamed_anchor_link'); +jest.mock('~/streaming/polyfills'); jest.mock('~/sentry'); jest.mock('~/flash'); @@ -30,6 +34,12 @@ describe('renderBlamePageStreams', () => 
{ }; handleStreamedAnchorLink.mockImplementation(() => stopAnchor); + rateLimitStreamRequests.mockImplementation(({ factory, total }) => { + return Array.from({ length: total }, (_, i) => { + return Promise.resolve(factory(i)); + }); + }); + toPolyfillReadable.mockImplementation((obj) => obj); beforeEach(() => { stopAnchor = jest.fn(); -- GitLab From 410f955cee29ecc1626cb4e5c91c5db3af7d9125 Mon Sep 17 00:00:00 2001 From: Stanislav Lashmanov Date: Tue, 21 Feb 2023 00:36:29 +0400 Subject: [PATCH 14/19] Change blame page phrases --- .../javascripts/blame/streaming/index.js | 4 ++-- app/views/projects/blame/show.html.haml | 4 ++-- locale/gitlab.pot | 24 +++++++++---------- spec/features/projects/blobs/blame_spec.rb | 14 +++++------ spec/frontend/blame/streaming/index_spec.js | 4 ++-- 5 files changed, 25 insertions(+), 25 deletions(-) diff --git a/app/assets/javascripts/blame/streaming/index.js b/app/assets/javascripts/blame/streaming/index.js index 436a4daf4948e6..eacbd15c76bfa1 100644 --- a/app/assets/javascripts/blame/streaming/index.js +++ b/app/assets/javascripts/blame/streaming/index.js @@ -33,9 +33,9 @@ export async function renderBlamePageStreams(firstStreamPromise) { ); } catch (error) { createAlert({ - message: __('Failed to fully load the blame file.'), + message: __('Blame could not be loaded as a single page.'), primaryButton: { - text: __('Show paginated blame file'), + text: __('View blame as separate pages'), clickHandler() { const newUrl = new URL(window.location); newUrl.searchParams.delete('streaming'); diff --git a/app/views/projects/blame/show.html.haml b/app/views/projects/blame/show.html.haml index fd341017f6b030..ee7ca9cd351666 100644 --- a/app/views/projects/blame/show.html.haml +++ b/app/views/projects/blame/show.html.haml @@ -41,14 +41,14 @@ - if @blame_pagination && @blame_pagination.total_pages > 1 .gl-display-flex.gl-justify-content-center.gl-flex-direction-column.gl-align-items-center.gl-p-3.gl-bg-gray-50.gl-border-t-solid.gl-border-t-1.gl-border-gray-100 = render Pajamas::ButtonComponent.new(href: @entire_blame_path, size: :small, button_options: { class: 'gl-mt-3' }) do |c| - = _('View entire blame') + = _('Show full blame') - if @streaming_enabled #blame-stream-loading.blame-stream-loading .gradient = gl_loading_icon(size: 'sm') %span.gl-mx-2 - = _('Loading blame file...') + = _('Loading full blame...') - if @blame_pagination = paginate(@blame_pagination, theme: "gitlab") diff --git a/locale/gitlab.pot b/locale/gitlab.pot index 72420ec3d0aefa..e9a4b02bb97ea1 100644 --- a/locale/gitlab.pot +++ b/locale/gitlab.pot @@ -6780,6 +6780,9 @@ msgstr "" msgid "Blame" msgstr "" +msgid "Blame could not be loaded as a single page." +msgstr "" + msgid "BlobViewer|View on %{environmentName}" msgstr "" @@ -17280,9 +17283,6 @@ msgstr "" msgid "Failed to find users for %{missing}" msgstr "" -msgid "Failed to fully load the blame file." -msgstr "" - msgid "Failed to generate export, please try again later." msgstr "" @@ -25667,15 +25667,15 @@ msgstr "" msgid "Loading %{name}" msgstr "" -msgid "Loading blame file..." -msgstr "" - msgid "Loading contribution stats for group members" msgstr "" msgid "Loading files, directories, and submodules in the path %{path} for commit reference %{ref}" msgstr "" +msgid "Loading full blame..." 
+msgstr "" + msgid "Loading more" msgstr "" @@ -40139,6 +40139,9 @@ msgstr "" msgid "Show filters" msgstr "" +msgid "Show full blame" +msgstr "" + msgid "Show group milestones" msgstr "" @@ -40160,9 +40163,6 @@ msgstr "" msgid "Show open epics" msgstr "" -msgid "Show paginated blame file" -msgstr "" - msgid "Show project milestones" msgstr "" @@ -47387,6 +47387,9 @@ msgstr "" msgid "View blame" msgstr "" +msgid "View blame as separate pages" +msgstr "" + msgid "View blame prior to this change" msgstr "" @@ -47416,9 +47419,6 @@ msgstr "" msgid "View eligible approvers" msgstr "" -msgid "View entire blame" -msgstr "" - msgid "View exposed artifact" msgid_plural "View %d exposed artifacts" msgstr[0] "" diff --git a/spec/features/projects/blobs/blame_spec.rb b/spec/features/projects/blobs/blame_spec.rb index 7bba94038d6d42..a7bd5b2f212a49 100644 --- a/spec/features/projects/blobs/blame_spec.rb +++ b/spec/features/projects/blobs/blame_spec.rb @@ -38,7 +38,7 @@ def visit_blob_blame(path) within '[data-testid="blob-content-holder"]' do expect(page).to have_css('.blame-commit') expect(page).not_to have_css('.gl-pagination') - expect(page).not_to have_link _('View entire blame') + expect(page).not_to have_link _('Show full blame') end end @@ -53,7 +53,7 @@ def visit_blob_blame(path) within '[data-testid="blob-content-holder"]' do expect(page).to have_css('.blame-commit') expect(page).to have_css('.gl-pagination') - expect(page).to have_link _('View entire blame') + expect(page).to have_link _('Show full blame') expect(page).to have_css('#L1') expect(page).not_to have_css('#L3') @@ -86,10 +86,10 @@ def visit_blob_blame(path) end shared_examples 'a full blame page' do - context 'when user clicks on View entire blame button' do + context 'when user clicks on Show full blame button' do before do visit_blob_blame(path) - click_link _('View entire blame') + click_link _('Show full blame') end it 'displays the blame page without pagination' do @@ -120,8 +120,8 @@ def visit_blob_blame(path) it 'shows loading text' do visit_blob_blame(path) - click_link _('View entire blame') - expect(page).to have_text('Loading blame file...') + click_link _('Show full blame') + expect(page).to have_text('Loading full blame...') end end @@ -136,7 +136,7 @@ def visit_blob_blame(path) within '[data-testid="blob-content-holder"]' do expect(page).to have_css('.blame-commit') expect(page).not_to have_css('.gl-pagination') - expect(page).not_to have_link _('View entire blame') + expect(page).not_to have_link _('Show full blame') end end end diff --git a/spec/frontend/blame/streaming/index_spec.js b/spec/frontend/blame/streaming/index_spec.js index bf6c88358fba8a..a5069f8a7d868b 100644 --- a/spec/frontend/blame/streaming/index_spec.js +++ b/spec/frontend/blame/streaming/index_spec.js @@ -100,9 +100,9 @@ describe('renderBlamePageStreams', () => { } expect(createAlert).toHaveBeenCalledWith({ - message: 'Failed to fully load the blame file.', + message: 'Blame could not be loaded as a single page.', primaryButton: { - text: 'Show paginated blame file', + text: 'View blame as separate pages', clickHandler: expect.any(Function), }, }); -- GitLab From 45e58dbcb46c7802da4049bd7dbcd7cb5d25e13d Mon Sep 17 00:00:00 2001 From: Stanislav Lashmanov Date: Tue, 21 Feb 2023 02:23:18 +0400 Subject: [PATCH 15/19] Remove leftover helper --- .../helpers/throttle_promises_helper.js | 50 ------------------- 1 file changed, 50 deletions(-) delete mode 100644 app/assets/javascripts/helpers/throttle_promises_helper.js diff --git 
a/app/assets/javascripts/helpers/throttle_promises_helper.js b/app/assets/javascripts/helpers/throttle_promises_helper.js deleted file mode 100644 index e0234bbb37f529..00000000000000 --- a/app/assets/javascripts/helpers/throttle_promises_helper.js +++ /dev/null @@ -1,50 +0,0 @@ -const wait = (timeout) => - new Promise((resolve) => { - setTimeout(resolve, timeout); - }); - -export const throttlePromises = ({ - factory, - immediateLength = 1, - length, - maxPromises = length, - timeout, -}) => { - const unsettled = []; - const pushUnsettled = (promise) => { - unsettled.push(promise); - // eslint-disable-next-line promise/catch-or-return - promise.then(() => { - unsettled.splice(unsettled.indexOf(promise), 1); - }); - }; - const immediate = Array.from({ length: immediateLength }, (_, i) => { - const promise = factory(i); - pushUnsettled(promise); - return promise; - }); - const queue = []; - const flushQueue = () => { - const promises = unsettled.length > maxPromises ? unsettled : [...unsettled, wait(timeout)]; - // eslint-disable-next-line promise/catch-or-return - Promise.race(promises).then(() => { - const cb = queue.shift(); - cb?.(); - if (queue.length !== 0) { - // wait for callback promise to be removed from unsettled - queueMicrotask(flushQueue); - } - }); - }; - const throttled = Array.from({ length: length - immediateLength }, (_, i) => { - return new Promise((resolve) => { - queue.push(() => { - const promise = factory(i + immediateLength); - pushUnsettled(promise); - resolve(promise); - }); - }); - }); - flushQueue(); - return [...immediate, ...throttled]; -}; -- GitLab From bc0452bedafbf4e243bf777de702b6c562aa1902 Mon Sep 17 00:00:00 2001 From: Stanislav Lashmanov Date: Tue, 21 Feb 2023 05:35:43 +0400 Subject: [PATCH 16/19] Test rateLimitStreamRequests --- .../javascripts/blame/streaming/index.js | 1 - .../streaming/rate_limit_stream_requests.js | 4 +- .../rate_limit_stream_requests_spec.js | 133 ++++++++++++++++++ 3 files changed, 135 insertions(+), 3 deletions(-) create mode 100644 spec/frontend/streaming/rate_limit_stream_requests_spec.js diff --git a/app/assets/javascripts/blame/streaming/index.js b/app/assets/javascripts/blame/streaming/index.js index eacbd15c76bfa1..48c74610c5491f 100644 --- a/app/assets/javascripts/blame/streaming/index.js +++ b/app/assets/javascripts/blame/streaming/index.js @@ -21,7 +21,6 @@ export async function renderBlamePageStreams(firstStreamPromise) { url.searchParams.set('page', index + 3); return fetch(url).then((response) => toPolyfillReadable(response.body)); }, - immediateCount: 2, maxConcurrentRequests: 5, total: totalExtraPages, }); diff --git a/app/assets/javascripts/streaming/rate_limit_stream_requests.js b/app/assets/javascripts/streaming/rate_limit_stream_requests.js index f54745123acf7e..c299ec39fcbcaf 100644 --- a/app/assets/javascripts/streaming/rate_limit_stream_requests.js +++ b/app/assets/javascripts/streaming/rate_limit_stream_requests.js @@ -16,14 +16,14 @@ const wait = (timeout) => // this rate-limiting approach is specific to Web Streams // because streams only resolve when they're fully consumed -// we need to split each stream into two pieces: +// so we need to split each stream into two pieces: // one for the rate-limiter (wait for all the bytes to be sent) // another for the original consumer export const rateLimitStreamRequests = ({ factory, total, - immediateCount, maxConcurrentRequests, + immediateCount = maxConcurrentRequests, timeout = 0, }) => { const unsettled = []; diff --git 
a/spec/frontend/streaming/rate_limit_stream_requests_spec.js b/spec/frontend/streaming/rate_limit_stream_requests_spec.js new file mode 100644 index 00000000000000..6544aa46a8fa21 --- /dev/null +++ b/spec/frontend/streaming/rate_limit_stream_requests_spec.js @@ -0,0 +1,133 @@ +import waitForPromises from 'helpers/wait_for_promises'; +import { rateLimitStreamRequests } from '~/streaming/rate_limit_stream_requests'; + +describe('rateLimitStreamRequests', () => { + const encoder = new TextEncoder('utf-8'); + const createStreamResponse = (content = 'foo') => + new ReadableStream({ + pull(controller) { + controller.enqueue(encoder.encode(content)); + controller.close(); + }, + }); + + const createFactory = (content) => { + return jest.fn(() => { + return Promise.resolve(createStreamResponse(content)); + }); + }; + + it('creates immediate requests', () => { + const factory = createFactory(); + const requests = rateLimitStreamRequests({ + factory, + maxConcurrentRequests: 2, + total: 2, + }); + expect(factory).toHaveBeenCalledTimes(2); + expect(requests.length).toBe(2); + }); + + it('returns correct values', async () => { + const fixture = 'foobar'; + const factory = createFactory(fixture); + const requests = rateLimitStreamRequests({ + factory, + maxConcurrentRequests: 2, + total: 2, + }); + + const decoder = new TextDecoder('utf-8'); + let result = ''; + for await (const stream of requests) { + await stream.pipeTo( + new WritableStream({ + // eslint-disable-next-line no-loop-func + write(content) { + result += decoder.decode(content); + }, + }), + ); + } + + expect(result).toBe(fixture + fixture); + }); + + it('delays rate limited requests', async () => { + const factory = createFactory(); + const requests = rateLimitStreamRequests({ + factory, + maxConcurrentRequests: 2, + total: 3, + }); + expect(factory).toHaveBeenCalledTimes(2); + expect(requests.length).toBe(3); + + await waitForPromises(); + + expect(factory).toHaveBeenCalledTimes(3); + }); + + it('runs next request after previous has been fulfilled', async () => { + let res; + const factory = jest + .fn() + .mockImplementationOnce( + () => + new Promise((resolve) => { + res = resolve; + }), + ) + .mockImplementationOnce(() => Promise.resolve(createStreamResponse())); + const requests = rateLimitStreamRequests({ + factory, + maxConcurrentRequests: 1, + total: 2, + }); + expect(factory).toHaveBeenCalledTimes(1); + expect(requests.length).toBe(2); + + await waitForPromises(); + + expect(factory).toHaveBeenCalledTimes(1); + + res(createStreamResponse()); + + await waitForPromises(); + + expect(factory).toHaveBeenCalledTimes(2); + }); + + it('uses timer to schedule next request', async () => { + let res; + const factory = jest + .fn() + .mockImplementationOnce( + () => + new Promise((resolve) => { + res = resolve; + }), + ) + .mockImplementationOnce(() => Promise.resolve(createStreamResponse())); + const requests = rateLimitStreamRequests({ + factory, + immediateCount: 1, + maxConcurrentRequests: 2, + total: 2, + timeout: 9999, + }); + expect(factory).toHaveBeenCalledTimes(1); + expect(requests.length).toBe(2); + + await waitForPromises(); + + expect(factory).toHaveBeenCalledTimes(1); + + jest.runAllTimers(); + + await waitForPromises(); + + expect(factory).toHaveBeenCalledTimes(2); + res(createStreamResponse()); + }); +}); -- GitLab From f8fb36a2b91167aecb64573e354224537917e5d6 Mon Sep 17 00:00:00 2001 From: Stanislav Lashmanov Date: Wed, 22 Feb 2023 20:52:48 +0400 Subject: [PATCH 17/19] Refactor blame controller and service Add a rollout 
link Change feature flag milestone --- app/controllers/projects/blame_controller.rb | 2 +- app/services/projects/blame_service.rb | 24 +++++++++---------- .../development/blame_page_streaming.yml | 4 ++-- 3 files changed, 14 insertions(+), 16 deletions(-) diff --git a/app/controllers/projects/blame_controller.rb b/app/controllers/projects/blame_controller.rb index ae810305d1cdaf..d41b347dc5aa87 100644 --- a/app/controllers/projects/blame_controller.rb +++ b/app/controllers/projects/blame_controller.rb @@ -34,7 +34,7 @@ def show @entire_blame_path = full_blame_path(streaming: true) end - @streaming_enabled = blame_service.streaming_state(permitted_params) + @streaming_enabled = blame_service.streaming_enabled @blame_pagination = blame_service.pagination unless @streaming_enabled @blame_per_page = blame_service.per_page diff --git a/app/services/projects/blame_service.rb b/app/services/projects/blame_service.rb index 3fdf08ce317cd3..bed574d8fcb967 100644 --- a/app/services/projects/blame_service.rb +++ b/app/services/projects/blame_service.rb @@ -16,7 +16,7 @@ def initialize(blob, commit, params) @streaming_enabled = streaming_state(params) end - attr_reader :page + attr_reader :page, :streaming_enabled def blame Gitlab::Blame.new(blob, commit, range: blame_range) @@ -31,14 +31,12 @@ def pagination end def per_page - return STREAMING_PER_PAGE if @streaming_enabled - - PER_PAGE + streaming_enabled ? STREAMING_PER_PAGE : PER_PAGE end def total_extra_pages total = blob_lines_count - total = blob_lines_count - STREAMING_FIRST_PAGE_SIZE + per_page if @streaming_enabled + total = blob_lines_count - STREAMING_FIRST_PAGE_SIZE + per_page if streaming_enabled (total / per_page).ceil end @@ -47,22 +45,16 @@ def streaming_possible Feature.enabled?(:blame_page_streaming, commit.project) end - def streaming_state(params) - return false unless streaming_possible - - Gitlab::Utils.to_boolean(params[:streaming], default: false) - end - private attr_reader :blob, :commit, :pagination_enabled def blame_range - return unless pagination_enabled || @streaming_enabled + return unless pagination_enabled || streaming_enabled first_line = (page - 1) * per_page + 1 - if @streaming_enabled + if streaming_enabled return 1..STREAMING_FIRST_PAGE_SIZE if page == 1 first_line = STREAMING_FIRST_PAGE_SIZE + (page - 2) * per_page + 1 @@ -81,6 +73,12 @@ def extract_page(params) page end + def streaming_state(params) + return false unless streaming_possible + + Gitlab::Utils.to_boolean(params[:streaming], default: false) + end + def pagination_state(params) return false if Gitlab::Utils.to_boolean(params[:no_pagination], default: false) diff --git a/config/feature_flags/development/blame_page_streaming.yml b/config/feature_flags/development/blame_page_streaming.yml index 982278c4d3a2d5..44d64800dabe62 100644 --- a/config/feature_flags/development/blame_page_streaming.yml +++ b/config/feature_flags/development/blame_page_streaming.yml @@ -1,8 +1,8 @@ --- name: blame_page_streaming introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/110208 -rollout_issue_url: -milestone: '15.9' +rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/392890 +milestone: '15.10' type: development group: group::source code default_enabled: false -- GitLab From c3a7fac9477880764765d8793cb25d5f74743a67 Mon Sep 17 00:00:00 2001 From: Stanislav Lashmanov Date: Wed, 22 Feb 2023 21:19:57 +0400 Subject: [PATCH 18/19] Add more explainers to the frontend code Fix implementation Refactor CSS Improve tests --- 
.../javascripts/blame/streaming/index.js | 6 ++++++ .../streaming/rate_limit_stream_requests.js | 21 +++++++++---------- .../javascripts/streaming/render_balancer.js | 2 +- app/assets/stylesheets/framework/files.scss | 2 +- .../handle_streamed_anchor_link_spec.js | 3 ++- 5 files changed, 20 insertions(+), 14 deletions(-) diff --git a/app/assets/javascripts/blame/streaming/index.js b/app/assets/javascripts/blame/streaming/index.js index 48c74610c5491f..a74e01b6423d61 100644 --- a/app/assets/javascripts/blame/streaming/index.js +++ b/app/assets/javascripts/blame/streaming/index.js @@ -18,9 +18,15 @@ export async function renderBlamePageStreams(firstStreamPromise) { const remainingStreams = rateLimitStreamRequests({ factory: (index) => { const url = new URL(pagesUrl); + // page numbers start with 1 + // the first page is already rendered in the document + // the second page is passed with the 'firstStreamPromise' url.searchParams.set('page', index + 3); return fetch(url).then((response) => toPolyfillReadable(response.body)); }, + // we don't want to overload gitaly with concurrent requests + // https://gitlab.com/gitlab-org/gitlab/-/issues/391842#note_1281695095 + // using 5 as a good starting point maxConcurrentRequests: 5, total: totalExtraPages, }); diff --git a/app/assets/javascripts/streaming/rate_limit_stream_requests.js b/app/assets/javascripts/streaming/rate_limit_stream_requests.js index c299ec39fcbcaf..a2f721633b4ed5 100644 --- a/app/assets/javascripts/streaming/rate_limit_stream_requests.js +++ b/app/assets/javascripts/streaming/rate_limit_stream_requests.js @@ -55,17 +55,16 @@ export const rateLimitStreamRequests = ({ const flushQueue = () => { const promises = unsettled.length > maxConcurrentRequests ? unsettled : [...unsettled, wait(timeout)]; - Promise.race(promises) - .then(() => { - const cb = queue.shift(); - cb?.(); - if (queue.length !== 0) { - // wait for stream consumer promise to be removed from unsettled - queueMicrotask(flushQueue); - } - }) - // errors are handled by the caller - .catch(() => {}); + // errors are handled by the caller + // eslint-disable-next-line promise/catch-or-return + Promise.race(promises).then(() => { + const cb = queue.shift(); + cb?.(); + if (queue.length !== 0) { + // wait for stream consumer promise to be removed from unsettled + queueMicrotask(flushQueue); + } + }); }; const throttled = Array.from({ length: total - immediateCount }, (_, i) => { diff --git a/app/assets/javascripts/streaming/render_balancer.js b/app/assets/javascripts/streaming/render_balancer.js index a1222879bf8989..66929ff3a54ad4 100644 --- a/app/assets/javascripts/streaming/render_balancer.js +++ b/app/assets/javascripts/streaming/render_balancer.js @@ -27,7 +27,7 @@ export class RenderBalancer { const duration = Math.round(timestamp - previousTimestamp); if (!duration) return; - if (duration > this.highFrameTime) { + if (duration >= this.highFrameTime) { this.decrease(); } else if (duration < this.lowFrameTime) { this.increase(); diff --git a/app/assets/stylesheets/framework/files.scss b/app/assets/stylesheets/framework/files.scss index 12845effbce2b3..b292adf9eac524 100644 --- a/app/assets/stylesheets/framework/files.scss +++ b/app/assets/stylesheets/framework/files.scss @@ -582,7 +582,7 @@ span.idiff { } .blame-stream-container { - border-top: 1px solid $gray-darker; + border-top: 1px solid $border-color; } .blame-stream-loading { diff --git a/spec/frontend/streaming/handle_streamed_anchor_link_spec.js b/spec/frontend/streaming/handle_streamed_anchor_link_spec.js 
index aa6dd0c6ffa5cf..f20bfa6c37d6f8 100644 --- a/spec/frontend/streaming/handle_streamed_anchor_link_spec.js +++ b/spec/frontend/streaming/handle_streamed_anchor_link_spec.js @@ -3,6 +3,7 @@ import waitForPromises from 'helpers/wait_for_promises'; import { handleStreamedAnchorLink } from '~/streaming/handle_streamed_anchor_link'; import { scrollToElement } from '~/lib/utils/common_utils'; import LineHighlighter from '~/blob/line_highlighter'; +import { TEST_HOST } from 'spec/test_constants'; jest.mock('~/lib/utils/common_utils'); jest.mock('~/blob/line_highlighter'); @@ -18,7 +19,7 @@ describe('handleStreamedAnchorLink', () => { describe('when anchor is given', () => { beforeEach(() => { delete window.location; - window.location = new URL(`https://www.example.com#${anchorName}`); + window.location = new URL(`${TEST_HOST}#${anchorName}`); }); describe('when element is present', () => { -- GitLab From 66706f612aaf95ac1ea5eaeef052c909f023b3d0 Mon Sep 17 00:00:00 2001 From: Stanislav Lashmanov Date: Fri, 24 Feb 2023 05:45:24 +0400 Subject: [PATCH 19/19] Fix blame service total extra pages and overlimit Limit max concurrent requests with total requests Fix blame service incorrect pagination Initialize streaming and pagination beforehand Use floats to get total extra pages Guard against zero extra pages Fix start chunk size Add support for line range anchors --- .../javascripts/streaming/chunk_writer.js | 2 +- .../streaming/handle_streamed_anchor_link.js | 11 +- .../streaming/rate_limit_stream_requests.js | 6 +- app/services/projects/blame_service.rb | 19 ++- spec/features/projects/blobs/blame_spec.rb | 1 - spec/frontend/streaming/chunk_writer_spec.js | 152 ++++++++++-------- .../handle_streamed_anchor_link_spec.js | 63 +++++++- .../rate_limit_stream_requests_spec.js | 22 +++ 8 files changed, 188 insertions(+), 88 deletions(-) diff --git a/app/assets/javascripts/streaming/chunk_writer.js b/app/assets/javascripts/streaming/chunk_writer.js index 2ab9e60bd63a91..4bbd0a5f8438f2 100644 --- a/app/assets/javascripts/streaming/chunk_writer.js +++ b/app/assets/javascripts/streaming/chunk_writer.js @@ -55,7 +55,7 @@ export class ChunkWriter { // ensure we still render chunks over time if the size criteria is not met this.scheduleAccumulatorFlush = throttle(this.flushAccumulator.bind(this), timeout); - const averageSize = Math.round((maxChunkSize - minChunkSize + 1) / 2); + const averageSize = Math.round((maxChunkSize + minChunkSize) / 2); this.size = Math.max(averageSize, minChunkSize); this.balancer = new RenderBalancer({ diff --git a/app/assets/javascripts/streaming/handle_streamed_anchor_link.js b/app/assets/javascripts/streaming/handle_streamed_anchor_link.js index 4644a09ce200af..315dc9bb0a0a89 100644 --- a/app/assets/javascripts/streaming/handle_streamed_anchor_link.js +++ b/app/assets/javascripts/streaming/handle_streamed_anchor_link.js @@ -5,13 +5,14 @@ import LineHighlighter from '~/blob/line_highlighter'; const noop = () => {}; export function handleStreamedAnchorLink(rootElement) { - const anchor = window.location.hash.substring(1); - if (!anchor || document.getElementById(anchor)) return noop; + // "#L100-200" β†’ ['L100', 'L200'] + const [anchorStart, end] = window.location.hash.substring(1).split('-'); + const anchorEnd = end ? 
`L${end}` : anchorStart; + if (!anchorStart || document.getElementById(anchorEnd)) return noop; const handler = throttle((mutationList, instance) => { - const target = document.getElementById(anchor); - if (!target) return; - scrollToElement(target); + if (!document.getElementById(anchorEnd)) return; + scrollToElement(document.getElementById(anchorStart)); // eslint-disable-next-line no-new new LineHighlighter(); instance.disconnect(); diff --git a/app/assets/javascripts/streaming/rate_limit_stream_requests.js b/app/assets/javascripts/streaming/rate_limit_stream_requests.js index a2f721633b4ed5..04a592baa162f2 100644 --- a/app/assets/javascripts/streaming/rate_limit_stream_requests.js +++ b/app/assets/javascripts/streaming/rate_limit_stream_requests.js @@ -26,6 +26,8 @@ export const rateLimitStreamRequests = ({ immediateCount = maxConcurrentRequests, timeout = 0, }) => { + if (total === 0) return []; + const unsettled = []; const pushUnsettled = (promise) => { @@ -49,7 +51,9 @@ export const rateLimitStreamRequests = ({ }, rej); }; - const immediate = Array.from({ length: immediateCount }, (_, i) => pushUnsettled(factory(i))); + const immediate = Array.from({ length: Math.min(immediateCount, total) }, (_, i) => + pushUnsettled(factory(i)), + ); const queue = []; const flushQueue = () => { diff --git a/app/services/projects/blame_service.rb b/app/services/projects/blame_service.rb index bed574d8fcb967..1ea16040655b1c 100644 --- a/app/services/projects/blame_service.rb +++ b/app/services/projects/blame_service.rb @@ -11,9 +11,10 @@ class BlameService def initialize(blob, commit, params) @blob = blob @commit = commit - @page = extract_page(params) - @pagination_enabled = pagination_state(params) @streaming_enabled = streaming_state(params) + @pagination_enabled = pagination_state(params) + @page = extract_page(params) + @params = params end attr_reader :page, :streaming_enabled @@ -34,11 +35,15 @@ def per_page streaming_enabled ? 
STREAMING_PER_PAGE : PER_PAGE end - def total_extra_pages - total = blob_lines_count - total = blob_lines_count - STREAMING_FIRST_PAGE_SIZE + per_page if streaming_enabled + def total_pages + total = (blob_lines_count.to_f / per_page).ceil + return total unless streaming_enabled + + ([blob_lines_count - STREAMING_FIRST_PAGE_SIZE, 0].max.to_f / per_page).ceil + 1 + end - (total / per_page).ceil + def total_extra_pages + [total_pages - 1, 0].max end def streaming_possible @@ -86,7 +91,7 @@ def pagination_state(params) end def overlimit?(page) - page > total_extra_pages + page > total_pages end def blob_lines_count diff --git a/spec/features/projects/blobs/blame_spec.rb b/spec/features/projects/blobs/blame_spec.rb index a7bd5b2f212a49..d3558af81b8ab8 100644 --- a/spec/features/projects/blobs/blame_spec.rb +++ b/spec/features/projects/blobs/blame_spec.rb @@ -113,7 +113,6 @@ def visit_blob_blame(path) context 'when streaming is enabled' do before do stub_const('Projects::BlameService::STREAMING_PER_PAGE', 50) - stub_feature_flags(blame_page_streaming: true) end it_behaves_like 'a full blame page' diff --git a/spec/frontend/streaming/chunk_writer_spec.js b/spec/frontend/streaming/chunk_writer_spec.js index 7dd02a2081de69..2aadb332838677 100644 --- a/spec/frontend/streaming/chunk_writer_spec.js +++ b/spec/frontend/streaming/chunk_writer_spec.js @@ -10,8 +10,6 @@ describe('ChunkWriter', () => { let abort; let config; let render; - let increase; - let decrease; const createChunk = (text) => { const encoder = new TextEncoder(); @@ -56,13 +54,7 @@ describe('ChunkWriter', () => { // render until 'false' } }); - RenderBalancer.mockImplementation(({ increase: inc, decrease: dec }) => { - decrease = jest.fn(dec); - increase = jest.fn(inc); - return { - render, - }; - }); + RenderBalancer.mockImplementation(() => ({ render })); }); describe('when chunk length must be "1"', () => { @@ -126,67 +118,91 @@ describe('ChunkWriter', () => { }); }); - describe('when frame time exceeds low limit', () => { - beforeEach(() => { - config = { - minChunkSize: 1, - maxChunkSize: 5, - balanceRate: 10, - }; - }); - - it('increases chunk size', () => { - const text = '111222223'; - const writer = createWriter(); - const chunk = createChunk(text); - - // size: 3 - writer.write(chunk.subarray(0, 3)); - increase(); - - // size: 5 (max) - writer.write(chunk.subarray(3)); - writer.close(); - - expect(accumulator).toBe(text); - expect(write.mock.calls).toMatchObject([['111'], ['22222'], ['3']]); - expect(close).toHaveBeenCalledTimes(1); - }); - }); + describe('chunk balancing', () => { + let increase; + let decrease; + let renderOnce; - describe('when frame time exceeds high limit', () => { beforeEach(() => { - config = { - minChunkSize: 1, - maxChunkSize: 10, - balanceRate: 2, - }; - }); - - it('decreases chunk size', () => { - const text = '111112223345'; - const writer = createWriter(); - const chunk = createChunk(text); - - // size: 5 - writer.write(chunk.subarray(0, 5)); - decrease(); - - // size: 3 - writer.write(chunk.subarray(5, 8)); - decrease(); - - // size: 2 - writer.write(chunk.subarray(8, 10)); - decrease(); - - // size: 1 - writer.write(chunk.subarray(10)); - writer.close(); - - expect(accumulator).toBe(text); - expect(write.mock.calls).toMatchObject([['11111'], ['222'], ['33'], ['4'], ['5']]); - expect(close).toHaveBeenCalledTimes(1); + render = jest.fn((cb) => { + let next = true; + renderOnce = () => { + if (!next) return; + next = cb(); + }; + }); + RenderBalancer.mockImplementation(({ increase: inc, decrease: 
dec }) => { + increase = jest.fn(inc); + decrease = jest.fn(dec); + return { + render, + }; + }); + }); + + describe('when frame time exceeds low limit', () => { + beforeEach(() => { + config = { + minChunkSize: 1, + maxChunkSize: 5, + balanceRate: 10, + }; + }); + + it('increases chunk size', () => { + const text = '111222223'; + const writer = createWriter(); + const chunk = createChunk(text); + + writer.write(chunk); + + renderOnce(); + increase(); + renderOnce(); + renderOnce(); + + writer.close(); + + expect(accumulator).toBe(text); + expect(write.mock.calls).toMatchObject([['111'], ['22222'], ['3']]); + expect(close).toHaveBeenCalledTimes(1); + }); + }); + + describe('when frame time exceeds high limit', () => { + beforeEach(() => { + config = { + minChunkSize: 1, + maxChunkSize: 10, + balanceRate: 2, + }; + }); + + it('decreases chunk size', () => { + const text = '1111112223345'; + const writer = createWriter(); + const chunk = createChunk(text); + + writer.write(chunk); + + renderOnce(); + decrease(); + + renderOnce(); + decrease(); + + renderOnce(); + decrease(); + + renderOnce(); + renderOnce(); + + writer.close(); + + expect(accumulator).toBe(text); + expect(write.mock.calls).toMatchObject([['111111'], ['222'], ['33'], ['4'], ['5']]); + expect(close).toHaveBeenCalledTimes(1); + }); }); }); diff --git a/spec/frontend/streaming/handle_streamed_anchor_link_spec.js b/spec/frontend/streaming/handle_streamed_anchor_link_spec.js index f20bfa6c37d6f8..ef17957b2fc6a1 100644 --- a/spec/frontend/streaming/handle_streamed_anchor_link_spec.js +++ b/spec/frontend/streaming/handle_streamed_anchor_link_spec.js @@ -9,22 +9,23 @@ jest.mock('~/lib/utils/common_utils'); jest.mock('~/blob/line_highlighter'); describe('handleStreamedAnchorLink', () => { - const anchorName = 'foo'; + const ANCHOR_START = 'L100'; + const ANCHOR_END = '300'; const findRoot = () => document.querySelector('#root'); afterEach(() => { resetHTMLFixture(); }); - describe('when anchor is given', () => { + describe('when single line anchor is given', () => { beforeEach(() => { delete window.location; - window.location = new URL(`${TEST_HOST}#${anchorName}`); + window.location = new URL(`${TEST_HOST}#${ANCHOR_START}`); }); describe('when element is present', () => { beforeEach(() => { - setHTMLFixture(`
`); + setHTMLFixture(`
`); handleStreamedAnchorLink(findRoot()); }); @@ -37,7 +38,59 @@ describe('handleStreamedAnchorLink', () => { describe('when element is streamed', () => { let stop; const insertElement = () => { - findRoot().insertAdjacentHTML('afterbegin', `
`); + findRoot().insertAdjacentHTML('afterbegin', `
`); + }; + + beforeEach(() => { + setHTMLFixture('
'); + stop = handleStreamedAnchorLink(findRoot()); + }); + + afterEach(() => { + stop = undefined; + }); + + it('scrolls to the anchor when inserted', async () => { + insertElement(); + await waitForPromises(); + expect(scrollToElement).toHaveBeenCalledTimes(1); + expect(LineHighlighter).toHaveBeenCalledTimes(1); + }); + + it("doesn't scroll to the anchor when destroyed", async () => { + stop(); + insertElement(); + await waitForPromises(); + expect(scrollToElement).not.toHaveBeenCalled(); + }); + }); + }); + + describe('when line range anchor is given', () => { + beforeEach(() => { + delete window.location; + window.location = new URL(`${TEST_HOST}#${ANCHOR_START}-${ANCHOR_END}`); + }); + + describe('when last element is present', () => { + beforeEach(() => { + setHTMLFixture(`
`); + handleStreamedAnchorLink(findRoot()); + }); + + it('does nothing', async () => { + await waitForPromises(); + expect(scrollToElement).not.toHaveBeenCalled(); + }); + }); + + describe('when last element is streamed', () => { + let stop; + const insertElement = () => { + findRoot().insertAdjacentHTML( + 'afterbegin', + `
`, + ); }; beforeEach(() => { diff --git a/spec/frontend/streaming/rate_limit_stream_requests_spec.js b/spec/frontend/streaming/rate_limit_stream_requests_spec.js index 6544aa46a8fa21..02e3cf93014acb 100644 --- a/spec/frontend/streaming/rate_limit_stream_requests_spec.js +++ b/spec/frontend/streaming/rate_limit_stream_requests_spec.js @@ -17,6 +17,28 @@ describe('rateLimitStreamRequests', () => { }); }; + it('does nothing for zero total requests', () => { + const factory = jest.fn(); + const requests = rateLimitStreamRequests({ + factory, + total: 0, + }); + expect(factory).toHaveBeenCalledTimes(0); + expect(requests.length).toBe(0); + }); + + it('does not exceed total requests', () => { + const factory = createFactory(); + const requests = rateLimitStreamRequests({ + factory, + immediateCount: 100, + maxConcurrentRequests: 100, + total: 2, + }); + expect(factory).toHaveBeenCalledTimes(2); + expect(requests.length).toBe(2); + }); + it('creates immediate requests', () => { const factory = createFactory(); const requests = rateLimitStreamRequests({ -- GitLab