From d4400e6b5c172916eb6dded8ebb04b8b650f493f Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Mon, 26 Mar 2018 17:47:46 +0900 Subject: [PATCH 01/86] Live trace PoC --- lib/gitlab/ci/trace.rb | 17 +--- lib/gitlab/ci/trace/chunked_io.rb | 145 ++++++++++++++++++++++++++++++ lib/gitlab/ci/trace/http_io.rb | 123 ++----------------------- lib/gitlab/ci/trace/live_io.rb | 57 ++++++++++++ 4 files changed, 211 insertions(+), 131 deletions(-) create mode 100644 lib/gitlab/ci/trace/chunked_io.rb create mode 100644 lib/gitlab/ci/trace/live_io.rb diff --git a/lib/gitlab/ci/trace.rb b/lib/gitlab/ci/trace.rb index cedf4171ab1e..10991fb4c94d 100644 --- a/lib/gitlab/ci/trace.rb +++ b/lib/gitlab/ci/trace.rb @@ -61,6 +61,8 @@ def read stream = Gitlab::Ci::Trace::Stream.new do if trace_artifact trace_artifact.open + elsif LiveIO.exists?(job.id) + LiveIO.new(job.id) elsif current_path File.open(current_path, "rb") elsif old_trace @@ -75,7 +77,7 @@ def read def write stream = Gitlab::Ci::Trace::Stream.new do - File.open(ensure_path, "a+b") + LiveIO.new(job.id) end yield(stream).tap do @@ -142,19 +144,6 @@ def create_job_trace!(job, path) end end - def ensure_path - return current_path if current_path - - ensure_directory - default_path - end - - def ensure_directory - unless Dir.exist?(default_directory) - FileUtils.mkdir_p(default_directory) - end - end - def current_path @current_path ||= paths.find do |trace_path| File.exist?(trace_path) diff --git a/lib/gitlab/ci/trace/chunked_io.rb b/lib/gitlab/ci/trace/chunked_io.rb new file mode 100644 index 000000000000..ff5bf59a46ff --- /dev/null +++ b/lib/gitlab/ci/trace/chunked_io.rb @@ -0,0 +1,145 @@ +## +# This class is compatible with IO class (https://ruby-doc.org/core-2.3.1/IO.html) +# source: https://gitlab.com/snippets/1685610 +module Gitlab + module Ci + class Trace + class ChunkedIO + attr_reader :size + attr_reader :tell + attr_reader :chunk, :chunk_range + + alias_method :pos, :tell + + def initialize(size) + @size = size + @tell = 0 + end + + def close + # no-op + end + + def binmode + # no-op + end + + def binmode? + true + end + + def path + nil + end + + def seek(pos, where = IO::SEEK_SET) + new_pos = + case where + when IO::SEEK_END + size + pos + when IO::SEEK_SET + pos + when IO::SEEK_CUR + tell + pos + else + -1 + end + + raise 'new position is outside of file' if new_pos < 0 || new_pos > size + + @tell = new_pos + end + + def eof? + tell == size + end + + def each_line + until eof? + line = readline + break if line.nil? + + yield(line) + end + end + + def read(length = nil) + out = "" + + until eof? || (length && out.length >= length) + data = get_chunk + break if data.empty? + + out << data + @tell += data.bytesize + end + + out = out[0, length] if length && out.length > length + + out + end + + def readline + out = "" + + until eof? + data = get_chunk + new_line = data.index("\n") + + if !new_line.nil? + out << data[0..new_line] + @tell += new_line + 1 + break + else + out << data + @tell += data.bytesize + end + end + + out + end + + def write(data) + raise NotImplementedError + end + + def truncate(offset) + raise NotImplementedError + end + + def flush + raise NotImplementedError + end + + def present? + true + end + + private + + ## + # To be overridden by superclasses + # + def get_chunk + raise NotImplementedError + end + + def in_range? 
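+          # True while the current read offset still falls inside the chunk
+          # cached by get_chunk; @chunk_range holds the byte range of @chunk.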
+ @chunk_range&.include?(tell) + end + + def chunk_offset + tell % BUFFER_SIZE + end + + def chunk_start + (tell / BUFFER_SIZE) * BUFFER_SIZE + end + + def chunk_end + [chunk_start + BUFFER_SIZE, size].min + end + end + end + end +end diff --git a/lib/gitlab/ci/trace/http_io.rb b/lib/gitlab/ci/trace/http_io.rb index ac4308f4e2cb..a3fbb4a8ff5a 100644 --- a/lib/gitlab/ci/trace/http_io.rb +++ b/lib/gitlab/ci/trace/http_io.rb @@ -1,116 +1,26 @@ -## -# This class is compatible with IO class (https://ruby-doc.org/core-2.3.1/IO.html) -# source: https://gitlab.com/snippets/1685610 module Gitlab module Ci class Trace - class HttpIO - BUFFER_SIZE = 128.kilobytes - - InvalidURLError = Class.new(StandardError) + class HttpIO < ChunkedIO FailedToGetChunkError = Class.new(StandardError) + InvalidURLError = Class.new(StandardError) - attr_reader :uri, :size - attr_reader :tell - attr_reader :chunk, :chunk_range + BUFFER_SIZE = 128.kilobytes - alias_method :pos, :tell + attr_reader :uri def initialize(url, size) raise InvalidURLError unless ::Gitlab::UrlSanitizer.valid?(url) @uri = URI(url) - @size = size - @tell = 0 - end - - def close - # no-op - end - - def binmode - # no-op - end - - def binmode? - true - end - def path - nil + super end def url @uri.to_s end - def seek(pos, where = IO::SEEK_SET) - new_pos = - case where - when IO::SEEK_END - size + pos - when IO::SEEK_SET - pos - when IO::SEEK_CUR - tell + pos - else - -1 - end - - raise 'new position is outside of file' if new_pos < 0 || new_pos > size - - @tell = new_pos - end - - def eof? - tell == size - end - - def each_line - until eof? - line = readline - break if line.nil? - - yield(line) - end - end - - def read(length = nil) - out = "" - - until eof? || (length && out.length >= length) - data = get_chunk - break if data.empty? - - out << data - @tell += data.bytesize - end - - out = out[0, length] if length && out.length > length - - out - end - - def readline - out = "" - - until eof? - data = get_chunk - new_line = data.index("\n") - - if !new_line.nil? - out << data[0..new_line] - @tell += new_line + 1 - break - else - out << data - @tell += data.bytesize - end - end - - out - end - def write(data) raise NotImplementedError end @@ -123,19 +33,10 @@ def flush raise NotImplementedError end - def present? - true - end - private ## - # The below methods are not implemented in IO class - # - def in_range? - @chunk_range&.include?(tell) - end - + # Override def get_chunk unless in_range? response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: uri.scheme == 'https') do |http| @@ -169,18 +70,6 @@ def request request.set_range(chunk_start, BUFFER_SIZE) end end - - def chunk_offset - tell % BUFFER_SIZE - end - - def chunk_start - (tell / BUFFER_SIZE) * BUFFER_SIZE - end - - def chunk_end - [chunk_start + BUFFER_SIZE, size].min - end end end end diff --git a/lib/gitlab/ci/trace/live_io.rb b/lib/gitlab/ci/trace/live_io.rb new file mode 100644 index 000000000000..ae9f6baad66d --- /dev/null +++ b/lib/gitlab/ci/trace/live_io.rb @@ -0,0 +1,57 @@ +module Gitlab + module Ci + class Trace + class LiveIO < ChunkedIO + BUFFER_SIZE = 32.kilobytes + + class << self + def exists?(job_id) + exists_in_redis? || exists_in_database? 
+ end + + def exists_in_redis?(job_id) + Gitlab::Redis::Cache.with do |redis| + redis.exists(buffer_key(job_id)) + end + end + + def exists_in_database?(job_id) + Ci::JobTraceChunk.exists?(job_id: job_id) + end + + def buffer_key(job_id) + "ci:live_trace_buffer:#{job_id}" + end + end + + attr_reader :job_id + + def initialize(job_id) + @job_id = job_id + + super + end + + def write(data) + # TODO: + end + + def truncate(offset) + # TODO: + end + + def flush + # TODO: + end + + private + + ## + # Override + def get_chunk + # TODO: + end + end + end + end +end -- GitLab From accc2cab20cb1546605f05aa68545d2d55c1f522 Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Mon, 26 Mar 2018 20:45:18 +0900 Subject: [PATCH 02/86] Add ci_job_trace_chunks table --- app/models/ci/job_trace_chunk.rb | 7 +++++++ ...180326202229_create_ci_job_trace_chunks.rb | 16 ++++++++++++++++ db/schema.rb | 19 +++++++++++++++++++ 3 files changed, 42 insertions(+) create mode 100644 app/models/ci/job_trace_chunk.rb create mode 100644 db/migrate/20180326202229_create_ci_job_trace_chunks.rb diff --git a/app/models/ci/job_trace_chunk.rb b/app/models/ci/job_trace_chunk.rb new file mode 100644 index 000000000000..8998ed920a5f --- /dev/null +++ b/app/models/ci/job_trace_chunk.rb @@ -0,0 +1,7 @@ +module Ci + class JobTraceChunk < ActiveRecord::Base + extend Gitlab::Ci::Model + + belongs_to :job, class_name: "Ci::Build", foreign_key: :job_id + end +end diff --git a/db/migrate/20180326202229_create_ci_job_trace_chunks.rb b/db/migrate/20180326202229_create_ci_job_trace_chunks.rb new file mode 100644 index 000000000000..f7548cd766e6 --- /dev/null +++ b/db/migrate/20180326202229_create_ci_job_trace_chunks.rb @@ -0,0 +1,16 @@ +class CreateCiJobTraceChunks < ActiveRecord::Migration + include Gitlab::Database::MigrationHelpers + + DOWNTIME = false + + def change + create_table :ci_job_trace_chunks do |t| + t.integer :job_id, null: false + t.integer :chunk_index, null: false + t.text :data + + t.foreign_key :ci_builds, column: :job_id, on_delete: :cascade + t.index [:chunk_index, :job_id], unique: true + end + end +end diff --git a/db/schema.rb b/db/schema.rb index 06fc1a9d7e92..54346dadad28 100644 --- a/db/schema.rb +++ b/db/schema.rb @@ -370,6 +370,22 @@ add_index "ci_job_artifacts", ["job_id", "file_type"], name: "index_ci_job_artifacts_on_job_id_and_file_type", unique: true, using: :btree add_index "ci_job_artifacts", ["project_id"], name: "index_ci_job_artifacts_on_project_id", using: :btree + create_table "ci_job_trace_chunks", force: :cascade do |t| + t.integer "job_id", null: false + t.integer "chunk_index", null: false + t.text "data" + end + + add_index "ci_job_trace_chunks", ["chunk_index", "job_id"], name: "index_ci_job_trace_chunks_on_chunk_index_and_job_id", unique: true, using: :btree + + create_table "ci_pipeline_chat_data", id: :bigserial, force: :cascade do |t| + t.integer "pipeline_id", null: false + t.integer "chat_name_id", null: false + t.text "response_url", null: false + end + + add_index "ci_pipeline_chat_data", ["pipeline_id"], name: "index_ci_pipeline_chat_data_on_pipeline_id", unique: true, using: :btree + create_table "ci_pipeline_schedule_variables", force: :cascade do |t| t.string "key", null: false t.text "value" @@ -2044,6 +2060,9 @@ add_foreign_key "ci_group_variables", "namespaces", column: "group_id", name: "fk_33ae4d58d8", on_delete: :cascade add_foreign_key "ci_job_artifacts", "ci_builds", column: "job_id", on_delete: :cascade add_foreign_key "ci_job_artifacts", "projects", on_delete: :cascade + 
add_foreign_key "ci_job_trace_chunks", "ci_builds", column: "job_id", on_delete: :cascade + add_foreign_key "ci_pipeline_chat_data", "chat_names", on_delete: :cascade + add_foreign_key "ci_pipeline_chat_data", "ci_pipelines", column: "pipeline_id", on_delete: :cascade add_foreign_key "ci_pipeline_schedule_variables", "ci_pipeline_schedules", column: "pipeline_schedule_id", name: "fk_41c35fda51", on_delete: :cascade add_foreign_key "ci_pipeline_schedules", "projects", name: "fk_8ead60fcc4", on_delete: :cascade add_foreign_key "ci_pipeline_schedules", "users", column: "owner_id", name: "fk_9ea99f58d2", on_delete: :nullify -- GitLab From 0142f9e0317a2c6f43aa015aa64cec3315431cea Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Tue, 27 Mar 2018 18:01:26 +0900 Subject: [PATCH 03/86] Revert some changes --- lib/gitlab/ci/trace.rb | 6 +- lib/gitlab/ci/trace/chunked_io.rb | 145 ------------------------------ lib/gitlab/ci/trace/http_io.rb | 123 +++++++++++++++++++++++-- 3 files changed, 122 insertions(+), 152 deletions(-) delete mode 100644 lib/gitlab/ci/trace/chunked_io.rb diff --git a/lib/gitlab/ci/trace.rb b/lib/gitlab/ci/trace.rb index 10991fb4c94d..39891c7d1bf3 100644 --- a/lib/gitlab/ci/trace.rb +++ b/lib/gitlab/ci/trace.rb @@ -77,7 +77,11 @@ def read def write stream = Gitlab::Ci::Trace::Stream.new do - LiveIO.new(job.id) + if current_path + current_path + else + LiveIO.new(job.id) + end end yield(stream).tap do diff --git a/lib/gitlab/ci/trace/chunked_io.rb b/lib/gitlab/ci/trace/chunked_io.rb deleted file mode 100644 index ff5bf59a46ff..000000000000 --- a/lib/gitlab/ci/trace/chunked_io.rb +++ /dev/null @@ -1,145 +0,0 @@ -## -# This class is compatible with IO class (https://ruby-doc.org/core-2.3.1/IO.html) -# source: https://gitlab.com/snippets/1685610 -module Gitlab - module Ci - class Trace - class ChunkedIO - attr_reader :size - attr_reader :tell - attr_reader :chunk, :chunk_range - - alias_method :pos, :tell - - def initialize(size) - @size = size - @tell = 0 - end - - def close - # no-op - end - - def binmode - # no-op - end - - def binmode? - true - end - - def path - nil - end - - def seek(pos, where = IO::SEEK_SET) - new_pos = - case where - when IO::SEEK_END - size + pos - when IO::SEEK_SET - pos - when IO::SEEK_CUR - tell + pos - else - -1 - end - - raise 'new position is outside of file' if new_pos < 0 || new_pos > size - - @tell = new_pos - end - - def eof? - tell == size - end - - def each_line - until eof? - line = readline - break if line.nil? - - yield(line) - end - end - - def read(length = nil) - out = "" - - until eof? || (length && out.length >= length) - data = get_chunk - break if data.empty? - - out << data - @tell += data.bytesize - end - - out = out[0, length] if length && out.length > length - - out - end - - def readline - out = "" - - until eof? - data = get_chunk - new_line = data.index("\n") - - if !new_line.nil? - out << data[0..new_line] - @tell += new_line + 1 - break - else - out << data - @tell += data.bytesize - end - end - - out - end - - def write(data) - raise NotImplementedError - end - - def truncate(offset) - raise NotImplementedError - end - - def flush - raise NotImplementedError - end - - def present? - true - end - - private - - ## - # To be overridden by superclasses - # - def get_chunk - raise NotImplementedError - end - - def in_range? 
- @chunk_range&.include?(tell) - end - - def chunk_offset - tell % BUFFER_SIZE - end - - def chunk_start - (tell / BUFFER_SIZE) * BUFFER_SIZE - end - - def chunk_end - [chunk_start + BUFFER_SIZE, size].min - end - end - end - end -end diff --git a/lib/gitlab/ci/trace/http_io.rb b/lib/gitlab/ci/trace/http_io.rb index a3fbb4a8ff5a..ac4308f4e2cb 100644 --- a/lib/gitlab/ci/trace/http_io.rb +++ b/lib/gitlab/ci/trace/http_io.rb @@ -1,26 +1,116 @@ +## +# This class is compatible with IO class (https://ruby-doc.org/core-2.3.1/IO.html) +# source: https://gitlab.com/snippets/1685610 module Gitlab module Ci class Trace - class HttpIO < ChunkedIO - FailedToGetChunkError = Class.new(StandardError) + class HttpIO + BUFFER_SIZE = 128.kilobytes + InvalidURLError = Class.new(StandardError) + FailedToGetChunkError = Class.new(StandardError) - BUFFER_SIZE = 128.kilobytes + attr_reader :uri, :size + attr_reader :tell + attr_reader :chunk, :chunk_range - attr_reader :uri + alias_method :pos, :tell def initialize(url, size) raise InvalidURLError unless ::Gitlab::UrlSanitizer.valid?(url) @uri = URI(url) + @size = size + @tell = 0 + end + + def close + # no-op + end + + def binmode + # no-op + end + + def binmode? + true + end - super + def path + nil end def url @uri.to_s end + def seek(pos, where = IO::SEEK_SET) + new_pos = + case where + when IO::SEEK_END + size + pos + when IO::SEEK_SET + pos + when IO::SEEK_CUR + tell + pos + else + -1 + end + + raise 'new position is outside of file' if new_pos < 0 || new_pos > size + + @tell = new_pos + end + + def eof? + tell == size + end + + def each_line + until eof? + line = readline + break if line.nil? + + yield(line) + end + end + + def read(length = nil) + out = "" + + until eof? || (length && out.length >= length) + data = get_chunk + break if data.empty? + + out << data + @tell += data.bytesize + end + + out = out[0, length] if length && out.length > length + + out + end + + def readline + out = "" + + until eof? + data = get_chunk + new_line = data.index("\n") + + if !new_line.nil? + out << data[0..new_line] + @tell += new_line + 1 + break + else + out << data + @tell += data.bytesize + end + end + + out + end + def write(data) raise NotImplementedError end @@ -33,10 +123,19 @@ def flush raise NotImplementedError end + def present? + true + end + private ## - # Override + # The below methods are not implemented in IO class + # + def in_range? + @chunk_range&.include?(tell) + end + def get_chunk unless in_range? 
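+            # Cache miss: fetch the BUFFER_SIZE window covering the current
+            # offset with a ranged GET (see #request below). Object storage
+            # answers 206 Partial Content, or 200 when the whole file fits.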
response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: uri.scheme == 'https') do |http| @@ -70,6 +169,18 @@ def request request.set_range(chunk_start, BUFFER_SIZE) end end + + def chunk_offset + tell % BUFFER_SIZE + end + + def chunk_start + (tell / BUFFER_SIZE) * BUFFER_SIZE + end + + def chunk_end + [chunk_start + BUFFER_SIZE, size].min + end end end end -- GitLab From 85ae610c5e8e4e5c4c01ffb1ae7dfe12bd8120ce Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Thu, 29 Mar 2018 02:13:08 +0900 Subject: [PATCH 04/86] Introduce chunks store --- app/uploaders/job_artifact_uploader.rb | 2 +- lib/gitlab/ci/trace.rb | 13 +- lib/gitlab/ci/trace/chunk_stores/base.rb | 51 ++++ lib/gitlab/ci/trace/chunk_stores/database.rb | 72 ++++++ .../ci/trace/chunk_stores/object_storage.rb | 70 ++++++ lib/gitlab/ci/trace/chunk_stores/redis.rb | 87 +++++++ lib/gitlab/ci/trace/chunked_io.rb | 224 ++++++++++++++++++ lib/gitlab/ci/trace/live_io.rb | 57 ----- lib/gitlab/ci/trace/live_trace.rb | 50 ++++ lib/gitlab/ci/trace/remote.rb | 41 ++++ 10 files changed, 605 insertions(+), 62 deletions(-) create mode 100644 lib/gitlab/ci/trace/chunk_stores/base.rb create mode 100644 lib/gitlab/ci/trace/chunk_stores/database.rb create mode 100644 lib/gitlab/ci/trace/chunk_stores/object_storage.rb create mode 100644 lib/gitlab/ci/trace/chunk_stores/redis.rb create mode 100644 lib/gitlab/ci/trace/chunked_io.rb delete mode 100644 lib/gitlab/ci/trace/live_io.rb create mode 100644 lib/gitlab/ci/trace/live_trace.rb create mode 100644 lib/gitlab/ci/trace/remote.rb diff --git a/app/uploaders/job_artifact_uploader.rb b/app/uploaders/job_artifact_uploader.rb index ef0f8acefd69..5f805e8ecee1 100644 --- a/app/uploaders/job_artifact_uploader.rb +++ b/app/uploaders/job_artifact_uploader.rb @@ -18,7 +18,7 @@ def open if file_storage? File.open(path, "rb") if path else - ::Gitlab::Ci::Trace::HttpIO.new(url, size) if url + ::Gitlab::Ci::Trace::Remote.new(model.job_id, url, size, "rb") if url end end diff --git a/lib/gitlab/ci/trace.rb b/lib/gitlab/ci/trace.rb index 39891c7d1bf3..3c7bb9c548cb 100644 --- a/lib/gitlab/ci/trace.rb +++ b/lib/gitlab/ci/trace.rb @@ -61,8 +61,8 @@ def read stream = Gitlab::Ci::Trace::Stream.new do if trace_artifact trace_artifact.open - elsif LiveIO.exists?(job.id) - LiveIO.new(job.id) + elsif LiveTrace.exists?(job.id) + LiveTrace.new(job.id, "rb") elsif current_path File.open(current_path, "rb") elsif old_trace @@ -80,7 +80,7 @@ def write if current_path current_path else - LiveIO.new(job.id) + LiveTrace.new(job.id, "a+b") end end @@ -105,7 +105,12 @@ def archive! raise ArchiveError, 'Already archived' if trace_artifact raise ArchiveError, 'Job is not finished yet' unless job.complete? - if current_path + if LiveTrace.exists?(job.id) + LiveTrace.new(job.id, "rb") do |stream| + archive_stream!(stream) + job.erase_old_trace! 
+ end + elsif current_path File.open(current_path) do |stream| archive_stream!(stream) FileUtils.rm(current_path) diff --git a/lib/gitlab/ci/trace/chunk_stores/base.rb b/lib/gitlab/ci/trace/chunk_stores/base.rb new file mode 100644 index 000000000000..3bf2950d8711 --- /dev/null +++ b/lib/gitlab/ci/trace/chunk_stores/base.rb @@ -0,0 +1,51 @@ +module Gitlab + module Ci + class Trace + module ChunkStores + class Base + InitializeError = Class.new(StandardError) + NotSupportedError = Class.new(StandardError) + + attr_reader :chunk_start + attr_reader :chunk_index + attr_reader :buffer_size + attr_reader :url + + def initialize(*identifiers, **params) + @buffer_size = params[:buffer_size] + @chunk_start = params[:chunk_start] + @url = params[:url] + end + + def exist? + raise NotImplementedError + end + + def get + raise NotImplementedError + end + + def size + raise NotImplementedError + end + + def write!(data) + raise NotImplementedError + end + + def truncate!(offset) + raise NotImplementedError + end + + def delete! + raise NotImplementedError + end + + def filled? + size == buffer_size + end + end + end + end + end +end diff --git a/lib/gitlab/ci/trace/chunk_stores/database.rb b/lib/gitlab/ci/trace/chunk_stores/database.rb new file mode 100644 index 000000000000..ea4b36d21b57 --- /dev/null +++ b/lib/gitlab/ci/trace/chunk_stores/database.rb @@ -0,0 +1,72 @@ +module Gitlab + module Ci + class Trace + module ChunkStores + class Database < Base + class << self + def open(job_id, chunk_index, **params) + raise ArgumentError unless job_id && chunk_index + + job = Ci::JobTraceChunk.find_or_initialize_by(job_id: job_id, chunk_index: chunk_index) + + yield self.class.new(job, params) + end + + def exist?(job_id, chunk_index) + Ci::JobTraceChunk.exists?(job_id: job_id, chunk_index: chunk_index) + end + + def chunks_count(job_id) + Ci::JobTraceChunk.where(job_id: job_id).count + end + + def chunks_size(job_id) + Ci::JobTraceChunk.where(job_id: job_id).pluck('len(data)') + .inject(0){ |sum, data_length| sum + data_length } + end + + def delete_all(job_id) + Ci::JobTraceChunk.destroy_all(job_id: job_id) + end + end + + attr_reader :job + + def initialize(job, **params) + super + + @job = job + end + + def get + job.data + end + + def size + job.data&.length || 0 + end + + def write!(data) + raise NotSupportedError, 'Only full size is supported' unless buffer_size == data.length + + job.create!(data: data) + + data.length + end + + def truncate!(offset) + raise NotSupportedError + end + + def delete! + job.destroy! 
+ end + + # def change_chunk_index!(job_id, new_chunk_index) + # raise NotSupportedError + # end + end + end + end + end +end diff --git a/lib/gitlab/ci/trace/chunk_stores/object_storage.rb b/lib/gitlab/ci/trace/chunk_stores/object_storage.rb new file mode 100644 index 000000000000..97f75c81b1b9 --- /dev/null +++ b/lib/gitlab/ci/trace/chunk_stores/object_storage.rb @@ -0,0 +1,70 @@ +module Gitlab + module Ci + class Trace + module ChunkStores + class ObjectStorage < Base + class << self + def open(job_id, chunk_index, **params) + raise ArgumentError unless job_id && chunk_index + + yield self.class.new(params) + end + + def exist?(job_id, chunk_index) + raise NotSupportedError + end + + def chunks_count(job_id) + raise NotSupportedError + end + end + + InvalidURLError = Class.new(StandardError) + FailedToGetChunkError = Class.new(StandardError) + + attr_reader :url + + def initialize(**params) + raise InvalidURLError unless ::Gitlab::UrlSanitizer.valid?(url) + + super + + @uri = URI(url) + end + + def get + response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: uri.scheme == 'https') do |http| + request = Net::HTTP::Get.new(uri) + request.set_range(chunk_start, buffer_size) + http.request(request) + end + + raise FailedToGetChunkError unless response.code == '200' || response.code == '206' + + response.body.force_encoding(Encoding::BINARY) + end + + def size + raise NotImplementedError + end + + def write!(data) + raise NotImplementedError + end + + def truncate!(offset) + raise NotImplementedError + end + + def delete + raise NotImplementedError + end + + def change_chunk_index!(job_id, new_chunk_index) + raise NotImplementedError + end + end + end + end + end +end diff --git a/lib/gitlab/ci/trace/chunk_stores/redis.rb b/lib/gitlab/ci/trace/chunk_stores/redis.rb new file mode 100644 index 000000000000..67fcdb32ff99 --- /dev/null +++ b/lib/gitlab/ci/trace/chunk_stores/redis.rb @@ -0,0 +1,87 @@ +module Gitlab + module Ci + class Trace + module ChunkStores + class Redis < Base + class << self + def open(job_id, chunk_index, **params) + raise ArgumentError unless job_id && chunk_index + + yield self.class.new(self.buffer_key(job_id, chunk_index), params) + end + + def exist?(job_id, chunk_index) + Gitlab::Redis::Cache.with do |redis| + redis.exists(self.buffer_key(job_id, chunk_index)) + end + end + + def chunks_count(job_id) + Gitlab::Redis::Cache.with do |redis| + redis.keys(buffer_key(job_id, '*')).count + end + end + + def chunks_size(job_id) + Gitlab::Redis::Cache.with do |redis| + redis.keys(buffer_key(job_id, '*')).inject(0) do |sum, key| + sum + redis.strlen(key) + end + end + end + + def buffer_key(job_id, chunk_index) + "live_trace_buffer:#{job_id}:#{chunk_index}" + end + end + + attr_reader :buffer_key + + def initialize(buffer_key, **params) + super + + @buffer_key = buffer_key + end + + def get + Gitlab::Redis::Cache.with do |redis| + redis.get(buffer_key) + end + end + + def size + Gitlab::Redis::Cache.with do |redis| + redis.strlen(buffer_key) + end + end + + def write!(data) + Gitlab::Redis::Cache.with do |redis| + redis.set(buffer_key, data) + end + end + + def truncate!(offset) + Gitlab::Redis::Cache.with do |redis| + truncated_data = redis.getrange(buffer_key, 0, offset) + redis.set(buffer_key, truncated_data) + end + end + + def delete! 
+ Gitlab::Redis::Cache.with do |redis| + redis.del(buffer_key) + end + end + + # def change_chunk_index!(job_id, new_chunk_index) + # Gitlab::Redis::Cache.with do |redis| + # new_buffer_key = self.class.buffer_key(job_id, new_chunk_index) + # redis.rename(buffer_key, new_buffer_key) + # end + # end + end + end + end + end +end diff --git a/lib/gitlab/ci/trace/chunked_io.rb b/lib/gitlab/ci/trace/chunked_io.rb new file mode 100644 index 000000000000..2d6383338b13 --- /dev/null +++ b/lib/gitlab/ci/trace/chunked_io.rb @@ -0,0 +1,224 @@ +## +# This class is compatible with IO class (https://ruby-doc.org/core-2.3.1/IO.html) +# source: https://gitlab.com/snippets/1685610 +module Gitlab + module Ci + class Trace + class ChunkedIO + WriteError = Class.new(StandardError) + + attr_reader :size + attr_reader :tell + attr_reader :chunk, :chunk_range + attr_reader :write_lock_uuid + attr_reader :job_id + + alias_method :pos, :tell + + def initialize(job_id, size, mode) + @size = size + @tell = 0 + @job_id = job_id + + if /(w|a)/ =~ mode + @write_lock_uuid = Gitlab::ExclusiveLease.new(write_lock_key, timeout: 5.minutes.to_i).try_obtain + raise WriteError, 'Already opened by another process' unless write_lock_uuid + end + end + + def close + Gitlab::ExclusiveLease.cancel(write_lock_key, write_lock_uuid) if write_lock_uuid + end + + def binmode + # no-op + end + + def binmode? + true + end + + def path + nil + end + + def seek(pos, where = IO::SEEK_SET) + new_pos = + case where + when IO::SEEK_END + size + pos + when IO::SEEK_SET + pos + when IO::SEEK_CUR + tell + pos + else + -1 + end + + raise 'new position is outside of file' if new_pos < 0 || new_pos > size + + @tell = new_pos + end + + def eof? + tell == size + end + + def each_line + until eof? + line = readline + break if line.nil? + + yield(line) + end + end + + def read(length = nil) + out = "" + + until eof? || (length && out.length >= length) + data = get_chunk + break if data.empty? + + out << data + @tell += data.bytesize + end + + out = out[0, length] if length && out.length > length + + out + end + + def readline + out = "" + + until eof? + data = get_chunk + new_line = data.index("\n") + + if !new_line.nil? + out << data[0..new_line] + @tell += new_line + 1 + break + else + out << data + @tell += data.bytesize + end + end + + out + end + + def write(data, &block) + raise WriteError, 'Already opened by another process' unless write_lock_uuid + + while data.present? + empty_space = BUFFER_SIZE - chunk_offset + + chunk_store.open(job_id, chunk_index, params_for_store) do |store| + data_to_write = '' + data_to_write += store.get if store.size > 0 + data_to_write += data.slice!(empty_space) + + written_size = store.write!(data_to_write) + + raise WriteError, 'Written size mismatch' unless data_to_write.length == written_size + + block.call(store, chunk_index) if block_given? 
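+              # Yielding the store is the subclass hook: LiveTrace uses it to
+              # rotate a chunk out of Redis once it reaches BUFFER_SIZE.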
+ + @tell += written_size + @size += written_size + end + end + end + + def truncate(offset) + raise WriteError, 'Already opened by another process' unless write_lock_uuid + + removal_chunk_index_start = (offset / BUFFER_SIZE) + removal_chunk_index_end = total_chunk_count - 1 + removal_chunk_offset = offset % BUFFER_SIZE + + if removal_chunk_offset > 0 + chunk_store.open(job_id, removal_chunk_index_start, params_for_store) do |store| + store.truncate!(removal_chunk_offset) + end + + removal_chunk_index_start += 1 + end + + (removal_chunk_index_start..removal_chunk_index_end).each do |removal_chunk_index| + chunk_store.open(job_id, removal_chunk_index, params_for_store) do |store| + store.delete! + end + end + + @tell = @size = offset + end + + def flush + # no-op + end + + def present? + true + end + + private + + ## + # The below methods are not implemented in IO class + # + def in_range? + @chunk_range&.include?(tell) + end + + def get_chunk + unless in_range? + chunk_store.open(job_id, chunk_index, params_for_store) do |store| + @chunk = store.get + @chunk_range = (chunk_start...(chunk_start + @chunk.length)) + end + end + + @chunk[chunk_offset..BUFFER_SIZE] + end + + def params_for_store + { + buffer_size: BUFFER_SIZE, + chunk_start: chunk_start + } + end + + def chunk_offset + tell % BUFFER_SIZE + end + + def chunk_start + (tell / BUFFER_SIZE) * BUFFER_SIZE + end + + def chunk_end + [chunk_start + BUFFER_SIZE, size].min + end + + def chunk_index + (tell / BUFFER_SIZE) + end + + def total_chunk_count + (size / BUFFER_SIZE) + 1 + end + + def last_chunk? + chunk_index == (total_chunk_count - 1) + end + + def write_lock_key + "live_trace_write:#{job_id}" + end + end + end + end +end diff --git a/lib/gitlab/ci/trace/live_io.rb b/lib/gitlab/ci/trace/live_io.rb deleted file mode 100644 index ae9f6baad66d..000000000000 --- a/lib/gitlab/ci/trace/live_io.rb +++ /dev/null @@ -1,57 +0,0 @@ -module Gitlab - module Ci - class Trace - class LiveIO < ChunkedIO - BUFFER_SIZE = 32.kilobytes - - class << self - def exists?(job_id) - exists_in_redis? || exists_in_database? - end - - def exists_in_redis?(job_id) - Gitlab::Redis::Cache.with do |redis| - redis.exists(buffer_key(job_id)) - end - end - - def exists_in_database?(job_id) - Ci::JobTraceChunk.exists?(job_id: job_id) - end - - def buffer_key(job_id) - "ci:live_trace_buffer:#{job_id}" - end - end - - attr_reader :job_id - - def initialize(job_id) - @job_id = job_id - - super - end - - def write(data) - # TODO: - end - - def truncate(offset) - # TODO: - end - - def flush - # TODO: - end - - private - - ## - # Override - def get_chunk - # TODO: - end - end - end - end -end diff --git a/lib/gitlab/ci/trace/live_trace.rb b/lib/gitlab/ci/trace/live_trace.rb new file mode 100644 index 000000000000..667fdec557e0 --- /dev/null +++ b/lib/gitlab/ci/trace/live_trace.rb @@ -0,0 +1,50 @@ +module Gitlab + module Ci + class Trace + class LiveTrace < ChunkedIO + BUFFER_SIZE = 128.kilobytes + + class << self + def exist?(job_id) + ChunkStores::Redis.chunks_count(job_id) > 0 || + ChunkStores::Database.chunks_count(job_id) > 0 + end + end + + def initialize(job_id, mode) + super(job_id, calculate_size, mode) + end + + def write(data) + raise NotImplementedError, 'Overwrite is not supported' unless tell == size + + super(data) do |store| + if store.filled? + # Rotate data from redis to database + ChunkStores::Database.open(job_id, chunk_index, params_for_store) do |to_store| + to_store.write!(store.get) + end + + store.delete! 
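+              # The filled chunk is now served from ci_job_trace_chunks; only
+              # the chunk still being appended to remains in Redis.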
+ end + end + end + + private + + def calculate_size + ChunkStores::Redis.chunks_size(job_id) + + ChunkStores::Database.chunks_size(job_id) + end + + def chunk_store + if last_chunk? + ChunkStores::Redis + else + ChunkStores::Database + end + end + end + end + end +end diff --git a/lib/gitlab/ci/trace/remote.rb b/lib/gitlab/ci/trace/remote.rb new file mode 100644 index 000000000000..ea27fdc27187 --- /dev/null +++ b/lib/gitlab/ci/trace/remote.rb @@ -0,0 +1,41 @@ +module Gitlab + module Ci + class Trace + class Remote < ChunkedIO + BUFFER_SIZE = 128.kilobytes + + NoSupportError = Class.new(StandardError) + + attr_reader :uri + + def initialize(job_id, url, size, mode) + @uri = URI(url) + + super(job_id, size, mode) + end + + def write(data) + raise NoSupportError + end + + def truncate(offset) + raise NoSupportError + end + + def flush + raise NoSupportError + end + + private + + def chunk_store + ChunkStores::Http + end + + def params_for_store + super.merge( { uri: uri } ) + end + end + end + end +end -- GitLab From eb64ecb2cb820b0a8d6e17fded2e596a58de31f8 Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Thu, 29 Mar 2018 03:45:07 +0900 Subject: [PATCH 05/86] Clarify namespaces --- app/uploaders/job_artifact_uploader.rb | 2 +- lib/gitlab/ci/trace.rb | 12 +-- lib/gitlab/ci/trace/chunk_stores/base.rb | 51 ----------- lib/gitlab/ci/trace/chunk_stores/database.rb | 72 --------------- .../ci/trace/chunk_stores/object_storage.rb | 70 --------------- lib/gitlab/ci/trace/chunk_stores/redis.rb | 87 ------------------- lib/gitlab/ci/trace/chunked_io.rb | 12 ++- lib/gitlab/ci/trace/file/chunk_store/base.rb | 48 ++++++++++ .../ci/trace/file/chunk_store/database.rb | 66 ++++++++++++++ .../trace/file/chunk_store/object_storage.rb | 61 +++++++++++++ lib/gitlab/ci/trace/file/chunk_store/redis.rb | 82 +++++++++++++++++ lib/gitlab/ci/trace/file/live_trace.rb | 60 +++++++++++++ lib/gitlab/ci/trace/file/remote.rb | 55 ++++++++++++ lib/gitlab/ci/trace/live_trace.rb | 50 ----------- lib/gitlab/ci/trace/remote.rb | 41 --------- 15 files changed, 387 insertions(+), 382 deletions(-) delete mode 100644 lib/gitlab/ci/trace/chunk_stores/base.rb delete mode 100644 lib/gitlab/ci/trace/chunk_stores/database.rb delete mode 100644 lib/gitlab/ci/trace/chunk_stores/object_storage.rb delete mode 100644 lib/gitlab/ci/trace/chunk_stores/redis.rb create mode 100644 lib/gitlab/ci/trace/file/chunk_store/base.rb create mode 100644 lib/gitlab/ci/trace/file/chunk_store/database.rb create mode 100644 lib/gitlab/ci/trace/file/chunk_store/object_storage.rb create mode 100644 lib/gitlab/ci/trace/file/chunk_store/redis.rb create mode 100644 lib/gitlab/ci/trace/file/live_trace.rb create mode 100644 lib/gitlab/ci/trace/file/remote.rb delete mode 100644 lib/gitlab/ci/trace/live_trace.rb delete mode 100644 lib/gitlab/ci/trace/remote.rb diff --git a/app/uploaders/job_artifact_uploader.rb b/app/uploaders/job_artifact_uploader.rb index 5f805e8ecee1..d7313b65069c 100644 --- a/app/uploaders/job_artifact_uploader.rb +++ b/app/uploaders/job_artifact_uploader.rb @@ -18,7 +18,7 @@ def open if file_storage? 
File.open(path, "rb") if path else - ::Gitlab::Ci::Trace::Remote.new(model.job_id, url, size, "rb") if url + ::Gitlab::Ci::Trace::RemoteFile.new(model.job_id, url, size, "rb") if url end end diff --git a/lib/gitlab/ci/trace.rb b/lib/gitlab/ci/trace.rb index 3c7bb9c548cb..c36c6f557e48 100644 --- a/lib/gitlab/ci/trace.rb +++ b/lib/gitlab/ci/trace.rb @@ -61,8 +61,8 @@ def read stream = Gitlab::Ci::Trace::Stream.new do if trace_artifact trace_artifact.open - elsif LiveTrace.exists?(job.id) - LiveTrace.new(job.id, "rb") + elsif LiveTraceFile.exists?(job.id) + LiveTraceFile.new(job.id, "rb") elsif current_path File.open(current_path, "rb") elsif old_trace @@ -80,7 +80,7 @@ def write if current_path current_path else - LiveTrace.new(job.id, "a+b") + LiveTraceFile.new(job.id, "a+b") end end @@ -105,10 +105,10 @@ def archive! raise ArchiveError, 'Already archived' if trace_artifact raise ArchiveError, 'Job is not finished yet' unless job.complete? - if LiveTrace.exists?(job.id) - LiveTrace.new(job.id, "rb") do |stream| + if LiveTraceFile.exists?(job.id) + LiveTraceFile.open(job.id, "wb") do |stream| archive_stream!(stream) - job.erase_old_trace! + stream.truncate(0) end elsif current_path File.open(current_path) do |stream| diff --git a/lib/gitlab/ci/trace/chunk_stores/base.rb b/lib/gitlab/ci/trace/chunk_stores/base.rb deleted file mode 100644 index 3bf2950d8711..000000000000 --- a/lib/gitlab/ci/trace/chunk_stores/base.rb +++ /dev/null @@ -1,51 +0,0 @@ -module Gitlab - module Ci - class Trace - module ChunkStores - class Base - InitializeError = Class.new(StandardError) - NotSupportedError = Class.new(StandardError) - - attr_reader :chunk_start - attr_reader :chunk_index - attr_reader :buffer_size - attr_reader :url - - def initialize(*identifiers, **params) - @buffer_size = params[:buffer_size] - @chunk_start = params[:chunk_start] - @url = params[:url] - end - - def exist? - raise NotImplementedError - end - - def get - raise NotImplementedError - end - - def size - raise NotImplementedError - end - - def write!(data) - raise NotImplementedError - end - - def truncate!(offset) - raise NotImplementedError - end - - def delete! - raise NotImplementedError - end - - def filled? 
- size == buffer_size - end - end - end - end - end -end diff --git a/lib/gitlab/ci/trace/chunk_stores/database.rb b/lib/gitlab/ci/trace/chunk_stores/database.rb deleted file mode 100644 index ea4b36d21b57..000000000000 --- a/lib/gitlab/ci/trace/chunk_stores/database.rb +++ /dev/null @@ -1,72 +0,0 @@ -module Gitlab - module Ci - class Trace - module ChunkStores - class Database < Base - class << self - def open(job_id, chunk_index, **params) - raise ArgumentError unless job_id && chunk_index - - job = Ci::JobTraceChunk.find_or_initialize_by(job_id: job_id, chunk_index: chunk_index) - - yield self.class.new(job, params) - end - - def exist?(job_id, chunk_index) - Ci::JobTraceChunk.exists?(job_id: job_id, chunk_index: chunk_index) - end - - def chunks_count(job_id) - Ci::JobTraceChunk.where(job_id: job_id).count - end - - def chunks_size(job_id) - Ci::JobTraceChunk.where(job_id: job_id).pluck('len(data)') - .inject(0){ |sum, data_length| sum + data_length } - end - - def delete_all(job_id) - Ci::JobTraceChunk.destroy_all(job_id: job_id) - end - end - - attr_reader :job - - def initialize(job, **params) - super - - @job = job - end - - def get - job.data - end - - def size - job.data&.length || 0 - end - - def write!(data) - raise NotSupportedError, 'Only full size is supported' unless buffer_size == data.length - - job.create!(data: data) - - data.length - end - - def truncate!(offset) - raise NotSupportedError - end - - def delete! - job.destroy! - end - - # def change_chunk_index!(job_id, new_chunk_index) - # raise NotSupportedError - # end - end - end - end - end -end diff --git a/lib/gitlab/ci/trace/chunk_stores/object_storage.rb b/lib/gitlab/ci/trace/chunk_stores/object_storage.rb deleted file mode 100644 index 97f75c81b1b9..000000000000 --- a/lib/gitlab/ci/trace/chunk_stores/object_storage.rb +++ /dev/null @@ -1,70 +0,0 @@ -module Gitlab - module Ci - class Trace - module ChunkStores - class ObjectStorage < Base - class << self - def open(job_id, chunk_index, **params) - raise ArgumentError unless job_id && chunk_index - - yield self.class.new(params) - end - - def exist?(job_id, chunk_index) - raise NotSupportedError - end - - def chunks_count(job_id) - raise NotSupportedError - end - end - - InvalidURLError = Class.new(StandardError) - FailedToGetChunkError = Class.new(StandardError) - - attr_reader :url - - def initialize(**params) - raise InvalidURLError unless ::Gitlab::UrlSanitizer.valid?(url) - - super - - @uri = URI(url) - end - - def get - response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: uri.scheme == 'https') do |http| - request = Net::HTTP::Get.new(uri) - request.set_range(chunk_start, buffer_size) - http.request(request) - end - - raise FailedToGetChunkError unless response.code == '200' || response.code == '206' - - response.body.force_encoding(Encoding::BINARY) - end - - def size - raise NotImplementedError - end - - def write!(data) - raise NotImplementedError - end - - def truncate!(offset) - raise NotImplementedError - end - - def delete - raise NotImplementedError - end - - def change_chunk_index!(job_id, new_chunk_index) - raise NotImplementedError - end - end - end - end - end -end diff --git a/lib/gitlab/ci/trace/chunk_stores/redis.rb b/lib/gitlab/ci/trace/chunk_stores/redis.rb deleted file mode 100644 index 67fcdb32ff99..000000000000 --- a/lib/gitlab/ci/trace/chunk_stores/redis.rb +++ /dev/null @@ -1,87 +0,0 @@ -module Gitlab - module Ci - class Trace - module ChunkStores - class Redis < Base - class << self - def open(job_id, chunk_index, **params) - 
raise ArgumentError unless job_id && chunk_index - - yield self.class.new(self.buffer_key(job_id, chunk_index), params) - end - - def exist?(job_id, chunk_index) - Gitlab::Redis::Cache.with do |redis| - redis.exists(self.buffer_key(job_id, chunk_index)) - end - end - - def chunks_count(job_id) - Gitlab::Redis::Cache.with do |redis| - redis.keys(buffer_key(job_id, '*')).count - end - end - - def chunks_size(job_id) - Gitlab::Redis::Cache.with do |redis| - redis.keys(buffer_key(job_id, '*')).inject(0) do |sum, key| - sum + redis.strlen(key) - end - end - end - - def buffer_key(job_id, chunk_index) - "live_trace_buffer:#{job_id}:#{chunk_index}" - end - end - - attr_reader :buffer_key - - def initialize(buffer_key, **params) - super - - @buffer_key = buffer_key - end - - def get - Gitlab::Redis::Cache.with do |redis| - redis.get(buffer_key) - end - end - - def size - Gitlab::Redis::Cache.with do |redis| - redis.strlen(buffer_key) - end - end - - def write!(data) - Gitlab::Redis::Cache.with do |redis| - redis.set(buffer_key, data) - end - end - - def truncate!(offset) - Gitlab::Redis::Cache.with do |redis| - truncated_data = redis.getrange(buffer_key, 0, offset) - redis.set(buffer_key, truncated_data) - end - end - - def delete! - Gitlab::Redis::Cache.with do |redis| - redis.del(buffer_key) - end - end - - # def change_chunk_index!(job_id, new_chunk_index) - # Gitlab::Redis::Cache.with do |redis| - # new_buffer_key = self.class.buffer_key(job_id, new_chunk_index) - # redis.rename(buffer_key, new_buffer_key) - # end - # end - end - end - end - end -end diff --git a/lib/gitlab/ci/trace/chunked_io.rb b/lib/gitlab/ci/trace/chunked_io.rb index 2d6383338b13..7f1d59dd6d53 100644 --- a/lib/gitlab/ci/trace/chunked_io.rb +++ b/lib/gitlab/ci/trace/chunked_io.rb @@ -136,7 +136,7 @@ def truncate(offset) raise WriteError, 'Already opened by another process' unless write_lock_uuid removal_chunk_index_start = (offset / BUFFER_SIZE) - removal_chunk_index_end = total_chunk_count - 1 + removal_chunk_index_end = chunks_count - 1 removal_chunk_offset = offset % BUFFER_SIZE if removal_chunk_offset > 0 @@ -164,6 +164,10 @@ def present? true end + def delete_chunks! + truncate(0) + end + private ## @@ -207,16 +211,16 @@ def chunk_index (tell / BUFFER_SIZE) end - def total_chunk_count + def chunks_count (size / BUFFER_SIZE) + 1 end def last_chunk? - chunk_index == (total_chunk_count - 1) + chunk_index == (chunks_count - 1) end def write_lock_key - "live_trace_write:#{job_id}" + "live_trace:operation:write:#{job_id}" end end end diff --git a/lib/gitlab/ci/trace/file/chunk_store/base.rb b/lib/gitlab/ci/trace/file/chunk_store/base.rb new file mode 100644 index 000000000000..ec3748b553f0 --- /dev/null +++ b/lib/gitlab/ci/trace/file/chunk_store/base.rb @@ -0,0 +1,48 @@ +module Gitlab + module Ci + class Trace + module File + module ChunkStore + class Base + InitializeError = Class.new(StandardError) + NotSupportedError = Class.new(StandardError) + + attr_reader :buffer_size + attr_reader :chunk_start + attr_reader :url + + def initialize(*identifiers, **params) + @buffer_size = params[:buffer_size] + @chunk_start = params[:chunk_start] + @url = params[:url] + end + + def get + raise NotImplementedError + end + + def size + raise NotImplementedError + end + + def write!(data) + raise NotImplementedError + end + + def truncate!(offset) + raise NotImplementedError + end + + def delete! + raise NotImplementedError + end + + def filled? 
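+            # A chunk is complete once it holds exactly buffer_size bytes;
+            # only filled chunks are eligible for rotation out of Redis.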
+ size == buffer_size + end + end + end + end + end + end +end diff --git a/lib/gitlab/ci/trace/file/chunk_store/database.rb b/lib/gitlab/ci/trace/file/chunk_store/database.rb new file mode 100644 index 000000000000..018675e26c73 --- /dev/null +++ b/lib/gitlab/ci/trace/file/chunk_store/database.rb @@ -0,0 +1,66 @@ +module Gitlab + module Ci + class Trace + module File + module ChunkStore + class Database < Base + class << self + def open(job_id, chunk_index, **params) + raise ArgumentError unless job_id && chunk_index + + job = Ci::JobTraceChunk.find_or_initialize_by(job_id: job_id, chunk_index: chunk_index) + + yield self.class.new(job, params) + end + + def exist?(job_id, chunk_index) + Ci::JobTraceChunk.exists?(job_id: job_id, chunk_index: chunk_index) + end + + def chunks_count(job_id) + Ci::JobTraceChunk.where(job_id: job_id).count + end + + def chunks_size(job_id) + Ci::JobTraceChunk.where(job_id: job_id).pluck('len(data)') + .inject(0){ |sum, data_length| sum + data_length } + end + end + + attr_reader :job + + def initialize(job, **params) + super + + @job = job + end + + def get + job.data + end + + def size + job.data&.length || 0 + end + + def write!(data) + raise NotImplementedError, 'Only full size write is supported' unless buffer_size == data.length + + job.create!(data: data) + + data.length + end + + def truncate!(offset) + raise NotImplementedError + end + + def delete! + job.destroy! + end + end + end + end + end + end +end diff --git a/lib/gitlab/ci/trace/file/chunk_store/object_storage.rb b/lib/gitlab/ci/trace/file/chunk_store/object_storage.rb new file mode 100644 index 000000000000..28ce0f2afc1e --- /dev/null +++ b/lib/gitlab/ci/trace/file/chunk_store/object_storage.rb @@ -0,0 +1,61 @@ +module Gitlab + module Ci + class Trace + module File + module ChunkStore + class ObjectStorage < Base + class << self + def open(job_id, chunk_index, **params) + raise ArgumentError unless job_id && chunk_index + + yield self.class.new(params) + end + + def exist?(job_id, chunk_index) + raise NotSupportedError + end + + def chunks_count(job_id) + raise NotSupportedError + end + end + + FailedToGetChunkError = Class.new(StandardError) + + def initialize(**params) + super + end + + def get + response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: uri.scheme == 'https') do |http| + request = Net::HTTP::Get.new(uri) + request.set_range(chunk_start, buffer_size) + http.request(request) + end + + raise FailedToGetChunkError unless response.code == '200' || response.code == '206' + + response.body.force_encoding(Encoding::BINARY) + end + + def size + raise NotImplementedError + end + + def write!(data) + raise NotImplementedError + end + + def truncate!(offset) + raise NotImplementedError + end + + def delete! 
+ raise NotImplementedError + end + end + end + end + end + end +end diff --git a/lib/gitlab/ci/trace/file/chunk_store/redis.rb b/lib/gitlab/ci/trace/file/chunk_store/redis.rb new file mode 100644 index 000000000000..c525bcb797f9 --- /dev/null +++ b/lib/gitlab/ci/trace/file/chunk_store/redis.rb @@ -0,0 +1,82 @@ +module Gitlab + module Ci + class Trace + module File + module ChunkStore + class Redis < Base + class << self + def open(job_id, chunk_index, **params) + raise ArgumentError unless job_id && chunk_index + + yield self.class.new(self.buffer_key(job_id, chunk_index), params) + end + + def exist?(job_id, chunk_index) + Gitlab::Redis::Cache.with do |redis| + redis.exists(self.buffer_key(job_id, chunk_index)) + end + end + + def chunks_count(job_id) + Gitlab::Redis::Cache.with do |redis| + redis.keys(buffer_key(job_id, '*')).count + end + end + + def chunks_size(job_id) + Gitlab::Redis::Cache.with do |redis| + redis.keys(buffer_key(job_id, '*')).inject(0) do |sum, key| + sum + redis.strlen(key) + end + end + end + + def buffer_key(job_id, chunk_index) + "live_trace_buffer:#{job_id}:#{chunk_index}" + end + end + + attr_reader :buffer_key + + def initialize(buffer_key, **params) + super + + @buffer_key = buffer_key + end + + def get + Gitlab::Redis::Cache.with do |redis| + redis.get(buffer_key) + end + end + + def size + Gitlab::Redis::Cache.with do |redis| + redis.strlen(buffer_key) + end + end + + def write!(data) + Gitlab::Redis::Cache.with do |redis| + redis.set(buffer_key, data) + end + end + + def truncate!(offset) + Gitlab::Redis::Cache.with do |redis| + truncated_data = redis.getrange(buffer_key, 0, offset) + redis.set(buffer_key, truncated_data) + end + end + + def delete! + Gitlab::Redis::Cache.with do |redis| + redis.del(buffer_key) + end + end + end + end + end + end + end +end diff --git a/lib/gitlab/ci/trace/file/live_trace.rb b/lib/gitlab/ci/trace/file/live_trace.rb new file mode 100644 index 000000000000..0a5b855d55e9 --- /dev/null +++ b/lib/gitlab/ci/trace/file/live_trace.rb @@ -0,0 +1,60 @@ +module Gitlab + module Ci + class Trace + module File + class LiveTrace < ChunkedIO + BUFFER_SIZE = 128.kilobytes + + class << self + def open(job_id, mode) + stream = self.class.new(job_id, mode) + + yield stream + + stream.close + end + + def exist?(job_id) + ChunkStores::Redis.chunks_count(job_id) > 0 || + ChunkStores::Database.chunks_count(job_id) > 0 + end + end + + def initialize(job_id, mode) + super(job_id, calculate_size, mode) + end + + def write(data) + raise NotImplementedError, 'Overwrite is not supported' unless tell == size + + super(data) do |store| + if store.filled? + # Rotate data from redis to database + ChunkStores::Database.open(job_id, chunk_index, params_for_store) do |to_store| + to_store.write!(store.get) + end + + store.delete! + end + end + end + + private + + def calculate_size + ChunkStores::Redis.chunks_size(job_id) + + ChunkStores::Database.chunks_size(job_id) + end + + def chunk_store + if last_chunk? 
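+              # The head chunk is still growing, so it is served from Redis;
+              # every earlier chunk is already full and lives in the database.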
+ ChunkStores::Redis + else + ChunkStores::Database + end + end + end + end + end + end +end diff --git a/lib/gitlab/ci/trace/file/remote.rb b/lib/gitlab/ci/trace/file/remote.rb new file mode 100644 index 000000000000..77b5a83717ac --- /dev/null +++ b/lib/gitlab/ci/trace/file/remote.rb @@ -0,0 +1,55 @@ +module Gitlab + module Ci + class Trace + module File + class Remote < ChunkedIO + BUFFER_SIZE = 128.kilobytes + + class << self + def open(job_id, url, size, mode) + stream = self.class.new(job_id, mode) + + yield stream + + stream.close + end + end + + InvalidURLError = Class.new(StandardError) + + attr_reader :uri + + def initialize(job_id, url, size, mode) + raise InvalidURLError unless ::Gitlab::UrlSanitizer.valid?(url) + + @uri = URI(url) + + super(job_id, size, mode) + end + + def write(data) + raise NotImplementedError + end + + def truncate(offset) + raise NotImplementedError + end + + def flush + raise NotImplementedError + end + + private + + def chunk_store + ChunkStores::ObjectStorage + end + + def params_for_store + super.merge( { uri: uri } ) + end + end + end + end + end +end diff --git a/lib/gitlab/ci/trace/live_trace.rb b/lib/gitlab/ci/trace/live_trace.rb deleted file mode 100644 index 667fdec557e0..000000000000 --- a/lib/gitlab/ci/trace/live_trace.rb +++ /dev/null @@ -1,50 +0,0 @@ -module Gitlab - module Ci - class Trace - class LiveTrace < ChunkedIO - BUFFER_SIZE = 128.kilobytes - - class << self - def exist?(job_id) - ChunkStores::Redis.chunks_count(job_id) > 0 || - ChunkStores::Database.chunks_count(job_id) > 0 - end - end - - def initialize(job_id, mode) - super(job_id, calculate_size, mode) - end - - def write(data) - raise NotImplementedError, 'Overwrite is not supported' unless tell == size - - super(data) do |store| - if store.filled? - # Rotate data from redis to database - ChunkStores::Database.open(job_id, chunk_index, params_for_store) do |to_store| - to_store.write!(store.get) - end - - store.delete! - end - end - end - - private - - def calculate_size - ChunkStores::Redis.chunks_size(job_id) + - ChunkStores::Database.chunks_size(job_id) - end - - def chunk_store - if last_chunk? 
- ChunkStores::Redis - else - ChunkStores::Database - end - end - end - end - end -end diff --git a/lib/gitlab/ci/trace/remote.rb b/lib/gitlab/ci/trace/remote.rb deleted file mode 100644 index ea27fdc27187..000000000000 --- a/lib/gitlab/ci/trace/remote.rb +++ /dev/null @@ -1,41 +0,0 @@ -module Gitlab - module Ci - class Trace - class Remote < ChunkedIO - BUFFER_SIZE = 128.kilobytes - - NoSupportError = Class.new(StandardError) - - attr_reader :uri - - def initialize(job_id, url, size, mode) - @uri = URI(url) - - super(job_id, size, mode) - end - - def write(data) - raise NoSupportError - end - - def truncate(offset) - raise NoSupportError - end - - def flush - raise NoSupportError - end - - private - - def chunk_store - ChunkStores::Http - end - - def params_for_store - super.merge( { uri: uri } ) - end - end - end - end -end -- GitLab From 3868a847954f5970a3d68a9da46adf547ca84847 Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Thu, 29 Mar 2018 03:49:52 +0900 Subject: [PATCH 06/86] Reorgnize archetecture --- .../chunk_store/base.rb | 0 .../chunk_store/database.rb | 0 .../chunk_store/object_storage.rb | 0 .../chunk_store/redis.rb | 0 .../ci/trace/chunked_file/chunked_io.rb | 230 ++++++++++++++++++ .../{file => chunked_file}/live_trace.rb | 2 +- .../ci/trace/{file => chunked_file}/remote.rb | 2 +- lib/gitlab/ci/trace/chunked_io.rb | 228 ----------------- 8 files changed, 232 insertions(+), 230 deletions(-) rename lib/gitlab/ci/trace/{file => chunked_file}/chunk_store/base.rb (100%) rename lib/gitlab/ci/trace/{file => chunked_file}/chunk_store/database.rb (100%) rename lib/gitlab/ci/trace/{file => chunked_file}/chunk_store/object_storage.rb (100%) rename lib/gitlab/ci/trace/{file => chunked_file}/chunk_store/redis.rb (100%) create mode 100644 lib/gitlab/ci/trace/chunked_file/chunked_io.rb rename lib/gitlab/ci/trace/{file => chunked_file}/live_trace.rb (98%) rename lib/gitlab/ci/trace/{file => chunked_file}/remote.rb (97%) delete mode 100644 lib/gitlab/ci/trace/chunked_io.rb diff --git a/lib/gitlab/ci/trace/file/chunk_store/base.rb b/lib/gitlab/ci/trace/chunked_file/chunk_store/base.rb similarity index 100% rename from lib/gitlab/ci/trace/file/chunk_store/base.rb rename to lib/gitlab/ci/trace/chunked_file/chunk_store/base.rb diff --git a/lib/gitlab/ci/trace/file/chunk_store/database.rb b/lib/gitlab/ci/trace/chunked_file/chunk_store/database.rb similarity index 100% rename from lib/gitlab/ci/trace/file/chunk_store/database.rb rename to lib/gitlab/ci/trace/chunked_file/chunk_store/database.rb diff --git a/lib/gitlab/ci/trace/file/chunk_store/object_storage.rb b/lib/gitlab/ci/trace/chunked_file/chunk_store/object_storage.rb similarity index 100% rename from lib/gitlab/ci/trace/file/chunk_store/object_storage.rb rename to lib/gitlab/ci/trace/chunked_file/chunk_store/object_storage.rb diff --git a/lib/gitlab/ci/trace/file/chunk_store/redis.rb b/lib/gitlab/ci/trace/chunked_file/chunk_store/redis.rb similarity index 100% rename from lib/gitlab/ci/trace/file/chunk_store/redis.rb rename to lib/gitlab/ci/trace/chunked_file/chunk_store/redis.rb diff --git a/lib/gitlab/ci/trace/chunked_file/chunked_io.rb b/lib/gitlab/ci/trace/chunked_file/chunked_io.rb new file mode 100644 index 000000000000..eb911d0a09a7 --- /dev/null +++ b/lib/gitlab/ci/trace/chunked_file/chunked_io.rb @@ -0,0 +1,230 @@ +## +# This class is compatible with IO class (https://ruby-doc.org/core-2.3.1/IO.html) +# source: https://gitlab.com/snippets/1685610 +module Gitlab + module Ci + class Trace + module ChunkedFile + class ChunkedIO + 
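+          ##
+          # A minimal usage sketch, assuming a concrete subclass such as
+          # LiveTrace that supplies a chunk_store:
+          #
+          #   Gitlab::Ci::Trace::ChunkedFile::LiveTrace.open(job.id, "a+b") do |stream|
+          #     stream.write("a line of log output\n")
+          #   end
+          #
+          # Opening in a write mode takes an exclusive lease per job, so a
+          # second concurrent writer raises WriteError.
+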
WriteError = Class.new(StandardError) + + attr_reader :size + attr_reader :tell + attr_reader :chunk, :chunk_range + attr_reader :write_lock_uuid + attr_reader :job_id + + alias_method :pos, :tell + + def initialize(job_id, size, mode) + @size = size + @tell = 0 + @job_id = job_id + + if /(w|a)/ =~ mode + @write_lock_uuid = Gitlab::ExclusiveLease.new(write_lock_key, timeout: 5.minutes.to_i).try_obtain + raise WriteError, 'Already opened by another process' unless write_lock_uuid + end + end + + def close + Gitlab::ExclusiveLease.cancel(write_lock_key, write_lock_uuid) if write_lock_uuid + end + + def binmode + # no-op + end + + def binmode? + true + end + + def path + nil + end + + def seek(pos, where = IO::SEEK_SET) + new_pos = + case where + when IO::SEEK_END + size + pos + when IO::SEEK_SET + pos + when IO::SEEK_CUR + tell + pos + else + -1 + end + + raise 'new position is outside of file' if new_pos < 0 || new_pos > size + + @tell = new_pos + end + + def eof? + tell == size + end + + def each_line + until eof? + line = readline + break if line.nil? + + yield(line) + end + end + + def read(length = nil) + out = "" + + until eof? || (length && out.length >= length) + data = get_chunk + break if data.empty? + + out << data + @tell += data.bytesize + end + + out = out[0, length] if length && out.length > length + + out + end + + def readline + out = "" + + until eof? + data = get_chunk + new_line = data.index("\n") + + if !new_line.nil? + out << data[0..new_line] + @tell += new_line + 1 + break + else + out << data + @tell += data.bytesize + end + end + + out + end + + def write(data, &block) + raise WriteError, 'Already opened by another process' unless write_lock_uuid + + while data.present? + empty_space = BUFFER_SIZE - chunk_offset + + chunk_store.open(job_id, chunk_index, params_for_store) do |store| + data_to_write = '' + data_to_write += store.get if store.size > 0 + data_to_write += data.slice!(empty_space) + + written_size = store.write!(data_to_write) + + raise WriteError, 'Written size mismatch' unless data_to_write.length == written_size + + block.call(store, chunk_index) if block_given? + + @tell += written_size + @size += written_size + end + end + end + + def truncate(offset) + raise WriteError, 'Already opened by another process' unless write_lock_uuid + + removal_chunk_index_start = (offset / BUFFER_SIZE) + removal_chunk_index_end = chunks_count - 1 + removal_chunk_offset = offset % BUFFER_SIZE + + if removal_chunk_offset > 0 + chunk_store.open(job_id, removal_chunk_index_start, params_for_store) do |store| + store.truncate!(removal_chunk_offset) + end + + removal_chunk_index_start += 1 + end + + (removal_chunk_index_start..removal_chunk_index_end).each do |removal_chunk_index| + chunk_store.open(job_id, removal_chunk_index, params_for_store) do |store| + store.delete! + end + end + + @tell = @size = offset + end + + def flush + # no-op + end + + def present? + true + end + + def delete_chunks! + truncate(0) + end + + private + + ## + # The below methods are not implemented in IO class + # + def in_range? + @chunk_range&.include?(tell) + end + + def get_chunk + unless in_range? 
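+                # Fetch and memoize the chunk covering the current offset so
+                # sequential reads within one chunk hit the store only once.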
+ chunk_store.open(job_id, chunk_index, params_for_store) do |store| + @chunk = store.get + @chunk_range = (chunk_start...(chunk_start + @chunk.length)) + end + end + + @chunk[chunk_offset..BUFFER_SIZE] + end + + def params_for_store + { + buffer_size: BUFFER_SIZE, + chunk_start: chunk_start + } + end + + def chunk_offset + tell % BUFFER_SIZE + end + + def chunk_start + (tell / BUFFER_SIZE) * BUFFER_SIZE + end + + def chunk_end + [chunk_start + BUFFER_SIZE, size].min + end + + def chunk_index + (tell / BUFFER_SIZE) + end + + def chunks_count + (size / BUFFER_SIZE) + 1 + end + + def last_chunk? + chunk_index == (chunks_count - 1) + end + + def write_lock_key + "live_trace:operation:write:#{job_id}" + end + end + end + end + end +end diff --git a/lib/gitlab/ci/trace/file/live_trace.rb b/lib/gitlab/ci/trace/chunked_file/live_trace.rb similarity index 98% rename from lib/gitlab/ci/trace/file/live_trace.rb rename to lib/gitlab/ci/trace/chunked_file/live_trace.rb index 0a5b855d55e9..4c6bae5896cd 100644 --- a/lib/gitlab/ci/trace/file/live_trace.rb +++ b/lib/gitlab/ci/trace/chunked_file/live_trace.rb @@ -1,7 +1,7 @@ module Gitlab module Ci class Trace - module File + module ChunkedFile class LiveTrace < ChunkedIO BUFFER_SIZE = 128.kilobytes diff --git a/lib/gitlab/ci/trace/file/remote.rb b/lib/gitlab/ci/trace/chunked_file/remote.rb similarity index 97% rename from lib/gitlab/ci/trace/file/remote.rb rename to lib/gitlab/ci/trace/chunked_file/remote.rb index 77b5a83717ac..56ff05c74943 100644 --- a/lib/gitlab/ci/trace/file/remote.rb +++ b/lib/gitlab/ci/trace/chunked_file/remote.rb @@ -1,7 +1,7 @@ module Gitlab module Ci class Trace - module File + module ChunkedFile class Remote < ChunkedIO BUFFER_SIZE = 128.kilobytes diff --git a/lib/gitlab/ci/trace/chunked_io.rb b/lib/gitlab/ci/trace/chunked_io.rb deleted file mode 100644 index 7f1d59dd6d53..000000000000 --- a/lib/gitlab/ci/trace/chunked_io.rb +++ /dev/null @@ -1,228 +0,0 @@ -## -# This class is compatible with IO class (https://ruby-doc.org/core-2.3.1/IO.html) -# source: https://gitlab.com/snippets/1685610 -module Gitlab - module Ci - class Trace - class ChunkedIO - WriteError = Class.new(StandardError) - - attr_reader :size - attr_reader :tell - attr_reader :chunk, :chunk_range - attr_reader :write_lock_uuid - attr_reader :job_id - - alias_method :pos, :tell - - def initialize(job_id, size, mode) - @size = size - @tell = 0 - @job_id = job_id - - if /(w|a)/ =~ mode - @write_lock_uuid = Gitlab::ExclusiveLease.new(write_lock_key, timeout: 5.minutes.to_i).try_obtain - raise WriteError, 'Already opened by another process' unless write_lock_uuid - end - end - - def close - Gitlab::ExclusiveLease.cancel(write_lock_key, write_lock_uuid) if write_lock_uuid - end - - def binmode - # no-op - end - - def binmode? - true - end - - def path - nil - end - - def seek(pos, where = IO::SEEK_SET) - new_pos = - case where - when IO::SEEK_END - size + pos - when IO::SEEK_SET - pos - when IO::SEEK_CUR - tell + pos - else - -1 - end - - raise 'new position is outside of file' if new_pos < 0 || new_pos > size - - @tell = new_pos - end - - def eof? - tell == size - end - - def each_line - until eof? - line = readline - break if line.nil? - - yield(line) - end - end - - def read(length = nil) - out = "" - - until eof? || (length && out.length >= length) - data = get_chunk - break if data.empty? - - out << data - @tell += data.bytesize - end - - out = out[0, length] if length && out.length > length - - out - end - - def readline - out = "" - - until eof? 
- data = get_chunk - new_line = data.index("\n") - - if !new_line.nil? - out << data[0..new_line] - @tell += new_line + 1 - break - else - out << data - @tell += data.bytesize - end - end - - out - end - - def write(data, &block) - raise WriteError, 'Already opened by another process' unless write_lock_uuid - - while data.present? - empty_space = BUFFER_SIZE - chunk_offset - - chunk_store.open(job_id, chunk_index, params_for_store) do |store| - data_to_write = '' - data_to_write += store.get if store.size > 0 - data_to_write += data.slice!(empty_space) - - written_size = store.write!(data_to_write) - - raise WriteError, 'Written size mismatch' unless data_to_write.length == written_size - - block.call(store, chunk_index) if block_given? - - @tell += written_size - @size += written_size - end - end - end - - def truncate(offset) - raise WriteError, 'Already opened by another process' unless write_lock_uuid - - removal_chunk_index_start = (offset / BUFFER_SIZE) - removal_chunk_index_end = chunks_count - 1 - removal_chunk_offset = offset % BUFFER_SIZE - - if removal_chunk_offset > 0 - chunk_store.open(job_id, removal_chunk_index_start, params_for_store) do |store| - store.truncate!(removal_chunk_offset) - end - - removal_chunk_index_start += 1 - end - - (removal_chunk_index_start..removal_chunk_index_end).each do |removal_chunk_index| - chunk_store.open(job_id, removal_chunk_index, params_for_store) do |store| - store.delete! - end - end - - @tell = @size = offset - end - - def flush - # no-op - end - - def present? - true - end - - def delete_chunks! - truncate(0) - end - - private - - ## - # The below methods are not implemented in IO class - # - def in_range? - @chunk_range&.include?(tell) - end - - def get_chunk - unless in_range? - chunk_store.open(job_id, chunk_index, params_for_store) do |store| - @chunk = store.get - @chunk_range = (chunk_start...(chunk_start + @chunk.length)) - end - end - - @chunk[chunk_offset..BUFFER_SIZE] - end - - def params_for_store - { - buffer_size: BUFFER_SIZE, - chunk_start: chunk_start - } - end - - def chunk_offset - tell % BUFFER_SIZE - end - - def chunk_start - (tell / BUFFER_SIZE) * BUFFER_SIZE - end - - def chunk_end - [chunk_start + BUFFER_SIZE, size].min - end - - def chunk_index - (tell / BUFFER_SIZE) - end - - def chunks_count - (size / BUFFER_SIZE) + 1 - end - - def last_chunk? 
- chunk_index == (chunks_count - 1) - end - - def write_lock_key - "live_trace:operation:write:#{job_id}" - end - end - end - end -end -- GitLab From f49aea7522b6d90eef5abd06c0eb98276b128775 Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Thu, 29 Mar 2018 05:40:48 +0900 Subject: [PATCH 07/86] Add spec for ChunkStore::Redis --- .../ci/trace/chunked_file/chunk_store/base.rb | 2 +- .../chunked_file/chunk_store/database.rb | 2 +- .../chunk_store/object_storage.rb | 2 +- .../trace/chunked_file/chunk_store/redis.rb | 15 +- .../ci/trace/chunked_file/chunked_io.rb | 2 +- .../chunked_file/chunk_store/redis_spec.rb | 276 ++++++++++++++++++ 6 files changed, 290 insertions(+), 9 deletions(-) create mode 100644 spec/lib/gitlab/ci/trace/chunked_file/chunk_store/redis_spec.rb diff --git a/lib/gitlab/ci/trace/chunked_file/chunk_store/base.rb b/lib/gitlab/ci/trace/chunked_file/chunk_store/base.rb index ec3748b553f0..245c27528bbb 100644 --- a/lib/gitlab/ci/trace/chunked_file/chunk_store/base.rb +++ b/lib/gitlab/ci/trace/chunked_file/chunk_store/base.rb @@ -1,7 +1,7 @@ module Gitlab module Ci class Trace - module File + module ChunkedFile module ChunkStore class Base InitializeError = Class.new(StandardError) diff --git a/lib/gitlab/ci/trace/chunked_file/chunk_store/database.rb b/lib/gitlab/ci/trace/chunked_file/chunk_store/database.rb index 018675e26c73..bc7dc8a7f636 100644 --- a/lib/gitlab/ci/trace/chunked_file/chunk_store/database.rb +++ b/lib/gitlab/ci/trace/chunked_file/chunk_store/database.rb @@ -1,7 +1,7 @@ module Gitlab module Ci class Trace - module File + module ChunkedFile module ChunkStore class Database < Base class << self diff --git a/lib/gitlab/ci/trace/chunked_file/chunk_store/object_storage.rb b/lib/gitlab/ci/trace/chunked_file/chunk_store/object_storage.rb index 28ce0f2afc1e..18b00e5d044b 100644 --- a/lib/gitlab/ci/trace/chunked_file/chunk_store/object_storage.rb +++ b/lib/gitlab/ci/trace/chunked_file/chunk_store/object_storage.rb @@ -1,7 +1,7 @@ module Gitlab module Ci class Trace - module File + module ChunkedFile module ChunkStore class ObjectStorage < Base class << self diff --git a/lib/gitlab/ci/trace/chunked_file/chunk_store/redis.rb b/lib/gitlab/ci/trace/chunked_file/chunk_store/redis.rb index c525bcb797f9..64625ee33e28 100644 --- a/lib/gitlab/ci/trace/chunked_file/chunk_store/redis.rb +++ b/lib/gitlab/ci/trace/chunked_file/chunk_store/redis.rb @@ -1,14 +1,14 @@ module Gitlab module Ci class Trace - module File + module ChunkedFile module ChunkStore class Redis < Base class << self def open(job_id, chunk_index, **params) raise ArgumentError unless job_id && chunk_index - yield self.class.new(self.buffer_key(job_id, chunk_index), params) + yield self.new(self.buffer_key(job_id, chunk_index), params) end def exist?(job_id, chunk_index) @@ -19,14 +19,16 @@ def exist?(job_id, chunk_index) def chunks_count(job_id) Gitlab::Redis::Cache.with do |redis| - redis.keys(buffer_key(job_id, '*')).count + redis.scan_each(:match => buffer_key(job_id, '?')).inject(0) do |sum, key| + sum + 1 + end end end def chunks_size(job_id) Gitlab::Redis::Cache.with do |redis| - redis.keys(buffer_key(job_id, '*')).inject(0) do |sum, key| - sum + redis.strlen(key) + redis.scan_each(:match => buffer_key(job_id, '?')).inject(0) do |sum, key| + sum += redis.strlen(key) end end end @@ -59,11 +61,14 @@ def size def write!(data) Gitlab::Redis::Cache.with do |redis| redis.set(buffer_key, data) + redis.strlen(buffer_key) end end def truncate!(offset) Gitlab::Redis::Cache.with do |redis| + return unless 
redis.exists(buffer_key) + truncated_data = redis.getrange(buffer_key, 0, offset) redis.set(buffer_key, truncated_data) end diff --git a/lib/gitlab/ci/trace/chunked_file/chunked_io.rb b/lib/gitlab/ci/trace/chunked_file/chunked_io.rb index eb911d0a09a7..08bc260a164a 100644 --- a/lib/gitlab/ci/trace/chunked_file/chunked_io.rb +++ b/lib/gitlab/ci/trace/chunked_file/chunked_io.rb @@ -119,7 +119,7 @@ def write(data, &block) chunk_store.open(job_id, chunk_index, params_for_store) do |store| data_to_write = '' data_to_write += store.get if store.size > 0 - data_to_write += data.slice!(empty_space) + data_to_write += data.slice!(0..empty_space) written_size = store.write!(data_to_write) diff --git a/spec/lib/gitlab/ci/trace/chunked_file/chunk_store/redis_spec.rb b/spec/lib/gitlab/ci/trace/chunked_file/chunk_store/redis_spec.rb new file mode 100644 index 000000000000..ad5ca44b74cc --- /dev/null +++ b/spec/lib/gitlab/ci/trace/chunked_file/chunk_store/redis_spec.rb @@ -0,0 +1,276 @@ +require 'spec_helper' + +describe Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis, :clean_gitlab_redis_cache do + let(:job_id) { 1 } + let(:buffer_size) { 128.kilobytes } + let(:chunk_index) { 0 } + let(:buffer_key) { described_class.buffer_key(job_id, chunk_index) } + let(:params) { { buffer_size: buffer_size } } + let(:trace) { 'Here is the trace' } + + describe '.open' do + subject { described_class.open(job_id, chunk_index, params) } + + it 'opens' do + expect { |b| described_class.open(job_id, chunk_index, params, &b) } + .to yield_successive_args(described_class) + end + + context 'when job_id is nil' do + let(:job_id) { nil } + + it { expect { subject }.to raise_error(ArgumentError) } + end + + context 'when chunk_index is nil' do + let(:chunk_index) { nil } + + it { expect { subject }.to raise_error(ArgumentError) } + end + end + + describe '.exist?' 
do + subject { described_class.exist?(job_id, chunk_index) } + + context 'when buffer_key exists' do + before do + described_class.new(buffer_key, params).write!(trace) + end + + it { is_expected.to be_truthy } + end + + context 'when buffer_key does not exist' do + it { is_expected.to be_falsy } + end + end + + describe '.chunks_count' do + subject { described_class.chunks_count(job_id) } + + context 'when buffer_key exists' do + before do + described_class.new(buffer_key, params).write!(trace) + end + + it { is_expected.to eq(1) } + + context 'when two chunks exists' do + let(:buffer_key_2) { described_class.buffer_key(job_id, chunk_index + 1) } + let(:trace_2) { 'Another trace' } + + before do + described_class.new(buffer_key_2, params).write!(trace_2) + end + + it { is_expected.to eq(2) } + end + end + + context 'when buffer_key does not exist' do + it { is_expected.to eq(0) } + end + end + + describe '.chunks_size' do + subject { described_class.chunks_size(job_id) } + + context 'when buffer_key exists' do + before do + described_class.new(buffer_key, params).write!(trace) + end + + it { is_expected.to eq(trace.length) } + + context 'when two chunks exists' do + let(:buffer_key_2) { described_class.buffer_key(job_id, chunk_index + 1) } + let(:trace_2) { 'Another trace' } + let(:chunks_size) { trace.length + trace_2.length } + + before do + described_class.new(buffer_key_2, params).write!(trace_2) + end + + it { is_expected.to eq(chunks_size) } + end + end + + context 'when buffer_key does not exist' do + it { is_expected.to eq(0) } + end + end + + describe '.buffer_key' do + subject { described_class.buffer_key(job_id, chunk_index) } + + it { is_expected.to eq("live_trace_buffer:#{job_id}:#{chunk_index}") } + end + + describe '#get' do + subject { described_class.new(buffer_key, params).get } + + context 'when buffer_key exists' do + before do + Gitlab::Redis::Cache.with do |redis| + redis.set(buffer_key, trace) + end + end + + it { is_expected.to eq(trace) } + end + + context 'when buffer_key does not exist' do + it { is_expected.not_to eq(trace) } + end + end + + describe '#size' do + subject { described_class.new(buffer_key, params).size } + + context 'when buffer_key exists' do + before do + Gitlab::Redis::Cache.with do |redis| + redis.set(buffer_key, trace) + end + end + + it { is_expected.to eq(trace.length) } + end + + context 'when buffer_key does not exist' do + it { is_expected.to eq(0) } + end + end + + describe '#write!' do + subject { described_class.new(buffer_key, params).write!(trace) } + + context 'when buffer_key exists' do + before do + Gitlab::Redis::Cache.with do |redis| + redis.set(buffer_key, 'Already data in the chunk') + end + end + + it 'overwrites' do + is_expected.to eq(trace.length) + + Gitlab::Redis::Cache.with do |redis| + expect(redis.get(buffer_key)).to eq(trace) + end + end + end + + context 'when buffer_key does not exist' do + it 'writes' do + is_expected.to eq(trace.length) + + Gitlab::Redis::Cache.with do |redis| + expect(redis.get(buffer_key)).to eq(trace) + end + end + end + + context 'when data is nil' do + let(:trace) { nil } + + it 'clears value' do + is_expected.to eq(0) + end + end + end + + describe '#truncate!' 
do + subject { described_class.new(buffer_key, params).truncate!(offset) } + + let(:offset) { 5 } + + context 'when buffer_key exists' do + before do + Gitlab::Redis::Cache.with do |redis| + redis.set(buffer_key, trace) + end + end + + it 'truncates' do + Gitlab::Redis::Cache.with do |redis| + expect(redis.get(buffer_key)).to eq(trace) + end + + subject + + Gitlab::Redis::Cache.with do |redis| + expect(redis.get(buffer_key)).to eq(trace.slice(0..offset)) + end + end + + context 'when offset is larger than data size' do + let(:offset) { 100 } + + it 'truncates' do + Gitlab::Redis::Cache.with do |redis| + expect(redis.get(buffer_key)).to eq(trace) + end + + subject + + Gitlab::Redis::Cache.with do |redis| + expect(redis.get(buffer_key)).to eq(trace.slice(0..offset)) + end + end + end + end + + context 'when buffer_key does not exist' do + it 'truncates' do + Gitlab::Redis::Cache.with do |redis| + expect(redis.get(buffer_key)).to be_nil + end + + subject + + Gitlab::Redis::Cache.with do |redis| + expect(redis.get(buffer_key)).to be_nil + end + end + end + end + + describe '#delete!' do + subject { described_class.new(buffer_key, params).delete! } + + context 'when buffer_key exists' do + before do + Gitlab::Redis::Cache.with do |redis| + redis.set(buffer_key, trace) + end + end + + it 'deletes' do + Gitlab::Redis::Cache.with do |redis| + expect(redis.exists(buffer_key)).to be_truthy + end + + subject + + Gitlab::Redis::Cache.with do |redis| + expect(redis.exists(buffer_key)).to be_falsy + end + end + end + + context 'when buffer_key does not exist' do + it 'deletes' do + Gitlab::Redis::Cache.with do |redis| + expect(redis.exists(buffer_key)).to be_falsy + end + + subject + + Gitlab::Redis::Cache.with do |redis| + expect(redis.exists(buffer_key)).to be_falsy + end + end + end + end +end -- GitLab From 1108df181ccba91454cbe122bcf7af05c6d76fe3 Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Thu, 29 Mar 2018 17:43:32 +0900 Subject: [PATCH 08/86] Add chunks_store database spec --- app/uploaders/job_artifact_uploader.rb | 2 +- lib/gitlab/ci/trace.rb | 27 ++- .../ci/trace/chunked_file/chunk_store/base.rb | 7 +- .../chunked_file/chunk_store/database.rb | 38 ++-- .../chunk_store/object_storage.rb | 27 ++- .../trace/chunked_file/chunk_store/redis.rb | 11 +- .../ci/trace/chunked_file/chunked_io.rb | 2 +- .../ci/trace/chunked_file/live_trace.rb | 2 +- spec/factories/ci/job_trace_chunks.rb | 7 + .../chunked_file/chunk_store/database_spec.rb | 201 ++++++++++++++++++ .../chunked_file/chunk_store/redis_spec.rb | 24 +-- 11 files changed, 301 insertions(+), 47 deletions(-) create mode 100644 spec/factories/ci/job_trace_chunks.rb create mode 100644 spec/lib/gitlab/ci/trace/chunked_file/chunk_store/database_spec.rb diff --git a/app/uploaders/job_artifact_uploader.rb b/app/uploaders/job_artifact_uploader.rb index d7313b65069c..ef0f8acefd69 100644 --- a/app/uploaders/job_artifact_uploader.rb +++ b/app/uploaders/job_artifact_uploader.rb @@ -18,7 +18,7 @@ def open if file_storage? 
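       # Locally stored artifacts are read straight from disk; remote
       # artifacts are streamed over HTTP in byte ranges via HttpIO.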
File.open(path, "rb") if path else - ::Gitlab::Ci::Trace::RemoteFile.new(model.job_id, url, size, "rb") if url + ::Gitlab::Ci::Trace::HttpIO.new(url, size) if url end end diff --git a/lib/gitlab/ci/trace.rb b/lib/gitlab/ci/trace.rb index c36c6f557e48..49f31352a659 100644 --- a/lib/gitlab/ci/trace.rb +++ b/lib/gitlab/ci/trace.rb @@ -61,7 +61,7 @@ def read stream = Gitlab::Ci::Trace::Stream.new do if trace_artifact trace_artifact.open - elsif LiveTraceFile.exists?(job.id) + elsif Feature.enabled?('ci_enable_live_trace') && LiveTraceFile.exists?(job.id) LiveTraceFile.new(job.id, "rb") elsif current_path File.open(current_path, "rb") @@ -77,10 +77,14 @@ def read def write stream = Gitlab::Ci::Trace::Stream.new do - if current_path - current_path + if Feature.enabled?('ci_enable_live_trace') + if current_path + current_path + else + LiveTraceFile.new(job.id, "a+b") + end else - LiveTraceFile.new(job.id, "a+b") + File.open(ensure_path, "a+b") end end @@ -105,7 +109,7 @@ def archive! raise ArchiveError, 'Already archived' if trace_artifact raise ArchiveError, 'Job is not finished yet' unless job.complete? - if LiveTraceFile.exists?(job.id) + if Feature.enabled?('ci_enable_live_trace') && LiveTraceFile.exists?(job.id) LiveTraceFile.open(job.id, "wb") do |stream| archive_stream!(stream) stream.truncate(0) @@ -153,6 +157,19 @@ def create_job_trace!(job, path) end end + def ensure_path + return current_path if current_path + + ensure_directory + default_path + end + + def ensure_directory + unless Dir.exist?(default_directory) + FileUtils.mkdir_p(default_directory) + end + end + def current_path @current_path ||= paths.find do |trace_path| File.exist?(trace_path) diff --git a/lib/gitlab/ci/trace/chunked_file/chunk_store/base.rb b/lib/gitlab/ci/trace/chunked_file/chunk_store/base.rb index 245c27528bbb..890cf26cba2f 100644 --- a/lib/gitlab/ci/trace/chunked_file/chunk_store/base.rb +++ b/lib/gitlab/ci/trace/chunked_file/chunk_store/base.rb @@ -4,9 +4,6 @@ class Trace module ChunkedFile module ChunkStore class Base - InitializeError = Class.new(StandardError) - NotSupportedError = Class.new(StandardError) - attr_reader :buffer_size attr_reader :chunk_start attr_reader :url @@ -17,6 +14,10 @@ def initialize(*identifiers, **params) @url = params[:url] end + def close + raise NotImplementedError + end + def get raise NotImplementedError end diff --git a/lib/gitlab/ci/trace/chunked_file/chunk_store/database.rb b/lib/gitlab/ci/trace/chunked_file/chunk_store/database.rb index bc7dc8a7f636..b3a1c4734d32 100644 --- a/lib/gitlab/ci/trace/chunked_file/chunk_store/database.rb +++ b/lib/gitlab/ci/trace/chunked_file/chunk_store/database.rb @@ -8,45 +8,55 @@ class << self def open(job_id, chunk_index, **params) raise ArgumentError unless job_id && chunk_index - job = Ci::JobTraceChunk.find_or_initialize_by(job_id: job_id, chunk_index: chunk_index) + job_trace_chunk = ::Ci::JobTraceChunk + .find_or_initialize_by(job_id: job_id, chunk_index: chunk_index) + store = self.new(job_trace_chunk, params) - yield self.class.new(job, params) + yield store + ensure + store&.close end def exist?(job_id, chunk_index) - Ci::JobTraceChunk.exists?(job_id: job_id, chunk_index: chunk_index) + ::Ci::JobTraceChunk.exists?(job_id: job_id, chunk_index: chunk_index) end def chunks_count(job_id) - Ci::JobTraceChunk.where(job_id: job_id).count + ::Ci::JobTraceChunk.where(job_id: job_id).count end def chunks_size(job_id) - Ci::JobTraceChunk.where(job_id: job_id).pluck('len(data)') - .inject(0){ |sum, data_length| sum + data_length } + 
::Ci::JobTraceChunk.where(job_id: job_id).pluck('data') + .inject(0) { |sum, data| sum + data.length } end end - attr_reader :job + attr_reader :job_trace_chunk - def initialize(job, **params) + def initialize(job_trace_chunk, **params) super - @job = job + @job_trace_chunk = job_trace_chunk + end + + def close + @job_trace_chunk = nil end def get - job.data + job_trace_chunk.data end def size - job.data&.length || 0 + job_trace_chunk.data&.length || 0 end def write!(data) - raise NotImplementedError, 'Only full size write is supported' unless buffer_size == data.length + raise NotImplementedError, 'Partial write is not supported' unless buffer_size == data&.length + raise NotImplementedError, 'UPDATE is not supported' if job_trace_chunk.data - job.create!(data: data) + job_trace_chunk.data = data + job_trace_chunk.save! data.length end @@ -56,7 +66,7 @@ def truncate!(offset) end def delete! - job.destroy! + job_trace_chunk.destroy! end end end diff --git a/lib/gitlab/ci/trace/chunked_file/chunk_store/object_storage.rb b/lib/gitlab/ci/trace/chunked_file/chunk_store/object_storage.rb index 18b00e5d044b..f144d670d032 100644 --- a/lib/gitlab/ci/trace/chunked_file/chunk_store/object_storage.rb +++ b/lib/gitlab/ci/trace/chunked_file/chunk_store/object_storage.rb @@ -8,24 +8,42 @@ class << self def open(job_id, chunk_index, **params) raise ArgumentError unless job_id && chunk_index - yield self.class.new(params) + relative_path = relative_path(job_id, chunk_index) + store = self.new(relative_path, params) + + yield store + ensure + store&.close end def exist?(job_id, chunk_index) - raise NotSupportedError + raise NotImplementedError end def chunks_count(job_id) - raise NotSupportedError + raise NotImplementedError + end + + def relative_path(job_id, chunk_index) + "#{job_id}/#{chunk_index}.chunk" end end FailedToGetChunkError = Class.new(StandardError) - def initialize(**params) + attr_reader :relative_path + + def initialize(relative_path, **params) super + + @relative_path = relative_path + end + + def close + @relative_path = nil end + ## TODO: Carrierwave::Fog integration def get response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: uri.scheme == 'https') do |http| request = Net::HTTP::Get.new(uri) @@ -43,6 +61,7 @@ def size end def write!(data) + raise NotImplementedError, 'Partial write is not supported' unless buffer_size == data.length raise NotImplementedError end diff --git a/lib/gitlab/ci/trace/chunked_file/chunk_store/redis.rb b/lib/gitlab/ci/trace/chunked_file/chunk_store/redis.rb index 64625ee33e28..6fa27b2c196f 100644 --- a/lib/gitlab/ci/trace/chunked_file/chunk_store/redis.rb +++ b/lib/gitlab/ci/trace/chunked_file/chunk_store/redis.rb @@ -8,7 +8,12 @@ class << self def open(job_id, chunk_index, **params) raise ArgumentError unless job_id && chunk_index - yield self.new(self.buffer_key(job_id, chunk_index), params) + buffer_key = self.buffer_key(job_id, chunk_index) + store = self.new(buffer_key, params) + + yield store + ensure + store&.close end def exist?(job_id, chunk_index) @@ -46,6 +51,10 @@ def initialize(buffer_key, **params) @buffer_key = buffer_key end + def close + @buffer_key = nil + end + def get Gitlab::Redis::Cache.with do |redis| redis.get(buffer_key) diff --git a/lib/gitlab/ci/trace/chunked_file/chunked_io.rb b/lib/gitlab/ci/trace/chunked_file/chunked_io.rb index 08bc260a164a..5c36158dbea1 100644 --- a/lib/gitlab/ci/trace/chunked_file/chunked_io.rb +++ b/lib/gitlab/ci/trace/chunked_file/chunked_io.rb @@ -22,7 +22,7 @@ def initialize(job_id, size, mode) 
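       # Writers must hold an exclusive lease (see write_lock_key) so that
       # only one process appends to a given job trace at a time.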
@job_id = job_id if /(w|a)/ =~ mode - @write_lock_uuid = Gitlab::ExclusiveLease.new(write_lock_key, timeout: 5.minutes.to_i).try_obtain + @write_lock_uuid = Gitlab::ExclusiveLease.new(write_lock_key, timeout: 1.hour.to_i).try_obtain raise WriteError, 'Already opened by another process' unless write_lock_uuid end end diff --git a/lib/gitlab/ci/trace/chunked_file/live_trace.rb b/lib/gitlab/ci/trace/chunked_file/live_trace.rb index 4c6bae5896cd..7c39561bec70 100644 --- a/lib/gitlab/ci/trace/chunked_file/live_trace.rb +++ b/lib/gitlab/ci/trace/chunked_file/live_trace.rb @@ -10,7 +10,7 @@ def open(job_id, mode) stream = self.class.new(job_id, mode) yield stream - + ensure stream.close end diff --git a/spec/factories/ci/job_trace_chunks.rb b/spec/factories/ci/job_trace_chunks.rb new file mode 100644 index 000000000000..c7fe1921f3aa --- /dev/null +++ b/spec/factories/ci/job_trace_chunks.rb @@ -0,0 +1,7 @@ +include ActionDispatch::TestProcess + +FactoryBot.define do + factory :job_trace_chunk, class: Ci::JobTraceChunk do + job factory: :ci_build + end +end diff --git a/spec/lib/gitlab/ci/trace/chunked_file/chunk_store/database_spec.rb b/spec/lib/gitlab/ci/trace/chunked_file/chunk_store/database_spec.rb new file mode 100644 index 000000000000..74fb81d7a535 --- /dev/null +++ b/spec/lib/gitlab/ci/trace/chunked_file/chunk_store/database_spec.rb @@ -0,0 +1,201 @@ +require 'spec_helper' + +describe Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Database do + let(:job_id) { job.id } + let(:chunk_index) { 0 } + let(:buffer_size) { 256 } + let(:job_trace_chunk) { ::Ci::JobTraceChunk.new(job_id: job_id, chunk_index: chunk_index) } + let(:params) { { buffer_size: buffer_size } } + let(:trace) { 'A' * buffer_size } + let(:job) { create(:ci_build) } + + describe '.open' do + subject { described_class.open(job_id, chunk_index, params) } + + it 'opens' do + expect { |b| described_class.open(job_id, chunk_index, params, &b) } + .to yield_successive_args(described_class) + end + + context 'when job_id is nil' do + let(:job_id) { nil } + + it { expect { subject }.to raise_error(ArgumentError) } + end + + context 'when chunk_index is nil' do + let(:chunk_index) { nil } + + it { expect { subject }.to raise_error(ArgumentError) } + end + end + + describe '.exist?' 
do + subject { described_class.exist?(job_id, chunk_index) } + + context 'when job_trace_chunk exists' do + before do + described_class.new(job_trace_chunk, params).write!(trace) + end + + it { is_expected.to be_truthy } + end + + context 'when job_trace_chunk does not exist' do + it { is_expected.to be_falsy } + end + end + + describe '.chunks_count' do + subject { described_class.chunks_count(job_id) } + + context 'when job_trace_chunk exists' do + before do + described_class.new(job_trace_chunk, params).write!(trace) + end + + it { is_expected.to eq(1) } + + context 'when two chunks exists' do + let(:job_trace_chunk_2) { ::Ci::JobTraceChunk.new(job_id: job_id, chunk_index: chunk_index + 1) } + let(:trace_2) { 'B' * buffer_size } + + before do + described_class.new(job_trace_chunk_2, params).write!(trace_2) + end + + it { is_expected.to eq(2) } + end + end + + context 'when job_trace_chunk does not exist' do + it { is_expected.to eq(0) } + end + end + + describe '.chunks_size' do + subject { described_class.chunks_size(job_id) } + + context 'when job_trace_chunk exists' do + before do + described_class.new(job_trace_chunk, params).write!(trace) + end + + it { is_expected.to eq(trace.length) } + + context 'when two chunks exists' do + let(:job_trace_chunk_2) { ::Ci::JobTraceChunk.new(job_id: job_id, chunk_index: chunk_index + 1) } + let(:trace_2) { 'B' * buffer_size } + let(:chunks_size) { trace.length + trace_2.length } + + before do + described_class.new(job_trace_chunk_2, params).write!(trace_2) + end + + it { is_expected.to eq(chunks_size) } + end + end + + context 'when job_trace_chunk does not exist' do + it { is_expected.to eq(0) } + end + end + + describe '#get' do + subject { described_class.new(job_trace_chunk, params).get } + + context 'when job_trace_chunk exists' do + before do + described_class.new(job_trace_chunk, params).write!(trace) + end + + it { is_expected.to eq(trace) } + end + + context 'when job_trace_chunk does not exist' do + it { is_expected.to be_nil } + end + end + + describe '#size' do + subject { described_class.new(job_trace_chunk, params).size } + + context 'when job_trace_chunk exists' do + before do + described_class.new(job_trace_chunk, params).write!(trace) + end + + it { is_expected.to eq(trace.length) } + end + + context 'when job_trace_chunk does not exist' do + it { is_expected.to eq(0) } + end + end + + describe '#write!' do + subject { described_class.new(job_trace_chunk, params).write!(trace) } + + context 'when job_trace_chunk exists' do + before do + described_class.new(job_trace_chunk, params).write!(trace) + end + + it { expect { subject }.to raise_error('UPDATE is not supported') } + end + + context 'when job_trace_chunk does not exist' do + let(:expected_data) { ::Ci::JobTraceChunk.find_by(job_id: job_id, chunk_index: chunk_index).data } + + it 'writes' do + is_expected.to eq(trace.length) + + expect(expected_data).to eq(trace) + end + end + + context 'when data is nil' do + let(:trace) { nil } + + it { expect { subject }.to raise_error('Partial write is not supported') } + end + end + + describe '#truncate!' do + subject { described_class.new(job_trace_chunk, params).truncate!(0) } + + it { expect { subject }.to raise_error(NotImplementedError) } + end + + describe '#delete!' do + subject { described_class.new(job_trace_chunk, params).delete! 
} + + context 'when job_trace_chunk exists' do + before do + described_class.new(job_trace_chunk, params).write!(trace) + end + + it 'deletes' do + expect(::Ci::JobTraceChunk.exists?(job_id: job_id, chunk_index: chunk_index)) + .to be_truthy + + subject + + expect(::Ci::JobTraceChunk.exists?(job_id: job_id, chunk_index: chunk_index)) + .to be_falsy + end + end + + context 'when job_trace_chunk does not exist' do + it 'deletes' do + expect(::Ci::JobTraceChunk.exists?(job_id: job_id, chunk_index: chunk_index)) + .to be_falsy + + subject + + expect(::Ci::JobTraceChunk.exists?(job_id: job_id, chunk_index: chunk_index)) + .to be_falsy + end + end + end +end diff --git a/spec/lib/gitlab/ci/trace/chunked_file/chunk_store/redis_spec.rb b/spec/lib/gitlab/ci/trace/chunked_file/chunk_store/redis_spec.rb index ad5ca44b74cc..83423ac2a33b 100644 --- a/spec/lib/gitlab/ci/trace/chunked_file/chunk_store/redis_spec.rb +++ b/spec/lib/gitlab/ci/trace/chunked_file/chunk_store/redis_spec.rb @@ -2,8 +2,8 @@ describe Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis, :clean_gitlab_redis_cache do let(:job_id) { 1 } - let(:buffer_size) { 128.kilobytes } let(:chunk_index) { 0 } + let(:buffer_size) { 128.kilobytes } let(:buffer_key) { described_class.buffer_key(job_id, chunk_index) } let(:params) { { buffer_size: buffer_size } } let(:trace) { 'Here is the trace' } @@ -111,16 +111,14 @@ context 'when buffer_key exists' do before do - Gitlab::Redis::Cache.with do |redis| - redis.set(buffer_key, trace) - end + described_class.new(buffer_key, params).write!(trace) end it { is_expected.to eq(trace) } end context 'when buffer_key does not exist' do - it { is_expected.not_to eq(trace) } + it { is_expected.to be_nil } end end @@ -129,9 +127,7 @@ context 'when buffer_key exists' do before do - Gitlab::Redis::Cache.with do |redis| - redis.set(buffer_key, trace) - end + described_class.new(buffer_key, params).write!(trace) end it { is_expected.to eq(trace.length) } @@ -147,9 +143,7 @@ context 'when buffer_key exists' do before do - Gitlab::Redis::Cache.with do |redis| - redis.set(buffer_key, 'Already data in the chunk') - end + described_class.new(buffer_key, params).write!('Already data in the chunk') end it 'overwrites' do @@ -187,9 +181,7 @@ context 'when buffer_key exists' do before do - Gitlab::Redis::Cache.with do |redis| - redis.set(buffer_key, trace) - end + described_class.new(buffer_key, params).write!(trace) end it 'truncates' do @@ -241,9 +233,7 @@ context 'when buffer_key exists' do before do - Gitlab::Redis::Cache.with do |redis| - redis.set(buffer_key, trace) - end + described_class.new(buffer_key, params).write!(trace) end it 'deletes' do -- GitLab From 1a05c60ee650fce76b7359464c36cc22f917ba62 Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Fri, 30 Mar 2018 01:44:41 +0900 Subject: [PATCH 09/86] Add spec for chunked_io --- .../chunked_file/chunk_store/database.rb | 4 + .../chunk_store/object_storage.rb | 4 + .../trace/chunked_file/chunk_store/redis.rb | 7 + .../ci/trace/chunked_file/chunked_io.rb | 97 ++-- .../ci/trace/chunked_file/live_trace.rb | 2 +- lib/gitlab/ci/trace/chunked_file/remote.rb | 6 +- .../ci/trace/chunked_file/chunked_io_spec.rb | 481 ++++++++++++++++++ spec/support/chunked_io/chunked_io_helpers.rb | 32 ++ 8 files changed, 590 insertions(+), 43 deletions(-) create mode 100644 spec/lib/gitlab/ci/trace/chunked_file/chunked_io_spec.rb create mode 100644 spec/support/chunked_io/chunked_io_helpers.rb diff --git a/lib/gitlab/ci/trace/chunked_file/chunk_store/database.rb 
b/lib/gitlab/ci/trace/chunked_file/chunk_store/database.rb index b3a1c4734d32..3948c07c352d 100644 --- a/lib/gitlab/ci/trace/chunked_file/chunk_store/database.rb +++ b/lib/gitlab/ci/trace/chunked_file/chunk_store/database.rb @@ -61,6 +61,10 @@ def write!(data) data.length end + def append!(data) + raise NotImplementedError + end + def truncate!(offset) raise NotImplementedError end diff --git a/lib/gitlab/ci/trace/chunked_file/chunk_store/object_storage.rb b/lib/gitlab/ci/trace/chunked_file/chunk_store/object_storage.rb index f144d670d032..95aaa9f9e2cb 100644 --- a/lib/gitlab/ci/trace/chunked_file/chunk_store/object_storage.rb +++ b/lib/gitlab/ci/trace/chunked_file/chunk_store/object_storage.rb @@ -65,6 +65,10 @@ def write!(data) raise NotImplementedError end + def append!(data) + raise NotImplementedError + end + def truncate!(offset) raise NotImplementedError end diff --git a/lib/gitlab/ci/trace/chunked_file/chunk_store/redis.rb b/lib/gitlab/ci/trace/chunked_file/chunk_store/redis.rb index 6fa27b2c196f..574657803ddb 100644 --- a/lib/gitlab/ci/trace/chunked_file/chunk_store/redis.rb +++ b/lib/gitlab/ci/trace/chunked_file/chunk_store/redis.rb @@ -74,6 +74,13 @@ def write!(data) end end + def append!(data) + Gitlab::Redis::Cache.with do |redis| + redis.append(buffer_key, data) + data.length + end + end + def truncate!(offset) Gitlab::Redis::Cache.with do |redis| return unless redis.exists(buffer_key) diff --git a/lib/gitlab/ci/trace/chunked_file/chunked_io.rb b/lib/gitlab/ci/trace/chunked_file/chunked_io.rb index 5c36158dbea1..c3a84083eaeb 100644 --- a/lib/gitlab/ci/trace/chunked_file/chunked_io.rb +++ b/lib/gitlab/ci/trace/chunked_file/chunked_io.rb @@ -1,12 +1,22 @@ ## -# This class is compatible with IO class (https://ruby-doc.org/core-2.3.1/IO.html) -# source: https://gitlab.com/snippets/1685610 +# This class is designed as it's compatible with IO class (https://ruby-doc.org/core-2.3.1/IO.html) module Gitlab module Ci class Trace module ChunkedFile class ChunkedIO + class << self + def open(job_id, size, mode) + stream = self.new(job_id, size, mode) + + yield stream + ensure + stream.close + end + end + WriteError = Class.new(StandardError) + FailedToGetChunkError = Class.new(StandardError) attr_reader :size attr_reader :tell @@ -23,7 +33,10 @@ def initialize(job_id, size, mode) if /(w|a)/ =~ mode @write_lock_uuid = Gitlab::ExclusiveLease.new(write_lock_key, timeout: 1.hour.to_i).try_obtain + raise WriteError, 'Already opened by another process' unless write_lock_uuid + + seek(0, IO::SEEK_END) if /a/ =~ mode end end @@ -39,10 +52,6 @@ def binmode? true end - def path - nil - end - def seek(pos, where = IO::SEEK_SET) new_pos = case where @@ -111,46 +120,56 @@ def readline end def write(data, &block) - raise WriteError, 'Already opened by another process' unless write_lock_uuid + raise WriteError, 'Could not write without lock' unless write_lock_uuid + raise WriteError, 'Could not write empty data' unless data.present? - while data.present? 
- empty_space = BUFFER_SIZE - chunk_offset + data = data.dup - chunk_store.open(job_id, chunk_index, params_for_store) do |store| - data_to_write = '' - data_to_write += store.get if store.size > 0 - data_to_write += data.slice!(0..empty_space) + chunk_index_start = chunk_index + chunk_index_end = (tell + data.length) / BUFFER_SIZE + prev_tell = tell - written_size = store.write!(data_to_write) + (chunk_index_start..chunk_index_end).each do |c_index| + chunk_store.open(job_id, c_index, params_for_store) do |store| + writable_space = BUFFER_SIZE - chunk_offset + writing_size = [writable_space, data.length].min - raise WriteError, 'Written size mismatch' unless data_to_write.length == written_size + if store.size > 0 + written_size = store.append!(data.slice!(0...writing_size)) + else + written_size = store.write!(data.slice!(0...writing_size)) + end - block.call(store, chunk_index) if block_given? + raise WriteError, 'Written size mismatch' unless writing_size == written_size @tell += written_size - @size += written_size + @size = [tell, size].max + + block.call(store, c_index) if block_given? end end + + tell - prev_tell end - def truncate(offset) - raise WriteError, 'Already opened by another process' unless write_lock_uuid + def truncate(offset, &block) + raise WriteError, 'Could not write without lock' unless write_lock_uuid + raise WriteError, 'Offset is out of bound' if offset > size || offset < 0 - removal_chunk_index_start = (offset / BUFFER_SIZE) - removal_chunk_index_end = chunks_count - 1 - removal_chunk_offset = offset % BUFFER_SIZE + chunk_index_start = (offset / BUFFER_SIZE) + chunk_index_end = chunks_count - 1 - if removal_chunk_offset > 0 - chunk_store.open(job_id, removal_chunk_index_start, params_for_store) do |store| - store.truncate!(removal_chunk_offset) - end + (chunk_index_start..chunk_index_end).reverse_each do |c_index| + chunk_store.open(job_id, c_index, params_for_store) do |store| + c_index_start = c_index * BUFFER_SIZE - removal_chunk_index_start += 1 - end + if offset <= c_index_start + store.delete! + else + store.truncate!(offset - c_index_start) if store.size > 0 + end - (removal_chunk_index_start..removal_chunk_index_end).each do |removal_chunk_index| - chunk_store.open(job_id, removal_chunk_index, params_for_store) do |store| - store.delete! + block.call(store, c_index) if block_given? end end @@ -165,15 +184,8 @@ def present? true end - def delete_chunks! - truncate(0) - end - private - ## - # The below methods are not implemented in IO class - # def in_range? @chunk_range&.include?(tell) end @@ -182,7 +194,10 @@ def get_chunk unless in_range? chunk_store.open(job_id, chunk_index, params_for_store) do |store| @chunk = store.get - @chunk_range = (chunk_start...(chunk_start + @chunk.length)) + + raise FailedToGetChunkError unless chunk + + @chunk_range = (chunk_start...(chunk_start + chunk.length)) end end @@ -223,6 +238,10 @@ def last_chunk? 
def write_lock_key "live_trace:operation:write:#{job_id}" end + + def chunk_store + raise NotImplementedError + end end end end diff --git a/lib/gitlab/ci/trace/chunked_file/live_trace.rb b/lib/gitlab/ci/trace/chunked_file/live_trace.rb index 7c39561bec70..081ae6e8d518 100644 --- a/lib/gitlab/ci/trace/chunked_file/live_trace.rb +++ b/lib/gitlab/ci/trace/chunked_file/live_trace.rb @@ -7,7 +7,7 @@ class LiveTrace < ChunkedIO class << self def open(job_id, mode) - stream = self.class.new(job_id, mode) + stream = self.new(job_id, mode) yield stream ensure diff --git a/lib/gitlab/ci/trace/chunked_file/remote.rb b/lib/gitlab/ci/trace/chunked_file/remote.rb index 56ff05c74943..caa2235bdbda 100644 --- a/lib/gitlab/ci/trace/chunked_file/remote.rb +++ b/lib/gitlab/ci/trace/chunked_file/remote.rb @@ -6,11 +6,11 @@ class Remote < ChunkedIO BUFFER_SIZE = 128.kilobytes class << self - def open(job_id, url, size, mode) - stream = self.class.new(job_id, mode) + def open(job_id, mode) + stream = self.new(job_id, mode) yield stream - + ensure stream.close end end diff --git a/spec/lib/gitlab/ci/trace/chunked_file/chunked_io_spec.rb b/spec/lib/gitlab/ci/trace/chunked_file/chunked_io_spec.rb new file mode 100644 index 000000000000..506ce71099af --- /dev/null +++ b/spec/lib/gitlab/ci/trace/chunked_file/chunked_io_spec.rb @@ -0,0 +1,481 @@ +require 'spec_helper' + +describe Gitlab::Ci::Trace::ChunkedFile::ChunkedIO, :clean_gitlab_redis_cache do + include ChunkedIOHelpers + + let(:chunked_io) { described_class.new(job_id, size, mode) } + let(:job_id) { 1 } + let(:size) { sample_trace_size } + let(:mode) { 'rb' } + let(:buffer_size) { 128.kilobytes } + let(:chunk_store) { Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis } + + before do + allow_any_instance_of(described_class).to receive(:chunk_store).and_return(chunk_store) + stub_const("Gitlab::Ci::Trace::ChunkedFile::ChunkedIO::BUFFER_SIZE", buffer_size) + end + + describe '#new' do + context 'when mode is read' do + let(:mode) { 'rb' } + + it 'raises no exception' do + described_class.new(job_id, size, mode) + + expect { described_class.new(job_id, size, mode) }.not_to raise_error + end + end + + context 'when mode is write' do + let(:mode) { 'a+b' } + + it 'raises an exception' do + described_class.new(job_id, size, mode) + + expect { described_class.new(job_id, size, mode) }.to raise_error('Already opened by another process') + end + + context 'when closed after open' do + it 'does not raise an exception' do + described_class.new(job_id, size, mode).close + + expect { described_class.new(job_id, size, mode) }.not_to raise_error + end + end + end + end + + describe '#seek' do + subject { chunked_io.seek(pos, where) } + + context 'when moves pos to end of the file' do + let(:pos) { 0 } + let(:where) { IO::SEEK_END } + + it { is_expected.to eq(size) } + end + + context 'when moves pos to middle of the file' do + let(:pos) { size / 2 } + let(:where) { IO::SEEK_SET } + + it { is_expected.to eq(size / 2) } + end + + context 'when moves pos around' do + it 'matches the result' do + expect(chunked_io.seek(0)).to eq(0) + expect(chunked_io.seek(100, IO::SEEK_CUR)).to eq(100) + expect { chunked_io.seek(size + 1, IO::SEEK_CUR) }.to raise_error('new position is outside of file') + end + end + end + + describe '#eof?' do + subject { chunked_io.eof? 
} + + context 'when current pos is at end of the file' do + before do + chunked_io.seek(size, IO::SEEK_SET) + end + + it { is_expected.to be_truthy } + end + + context 'when current pos is not at end of the file' do + before do + chunked_io.seek(0, IO::SEEK_SET) + end + + it { is_expected.to be_falsey } + end + end + + describe '#each_line' do + let(:buffer_size) { 128.kilobytes } + let(:string_io) { StringIO.new(sample_trace_raw) } + + context 'when BUFFER_SIZE is smaller than file size' do + before do + set_smaller_buffer_size_than(size) + fill_trace_to_chunks(sample_trace_raw) + end + + it 'yields lines' do + expect { |b| described_class.new(job_id, size, 'rb').each_line(&b) } + .to yield_successive_args(*string_io.each_line.to_a) + end + end + + context 'when BUFFER_SIZE is larger than file size' do + let(:buffer_size) { size + 1000 } + + before do + set_larger_buffer_size_than(size) + fill_trace_to_chunks(sample_trace_raw) + end + + it 'calls get_chunk only once' do + expect(chunk_store).to receive(:open).once.and_call_original + + described_class.new(job_id, size, 'rb').each_line { |line| } + end + end + end + + describe '#read' do + subject { described_class.new(job_id, size, 'rb').read(length) } + + context 'when read whole size' do + let(:length) { nil } + + context 'when BUFFER_SIZE is smaller than file size', :clean_gitlab_redis_cache do + before do + set_smaller_buffer_size_than(size) + fill_trace_to_chunks(sample_trace_raw) + end + + it 'reads a trace' do + is_expected.to eq(sample_trace_raw) + end + end + + context 'when BUFFER_SIZE is larger than file size', :clean_gitlab_redis_cache do + before do + set_larger_buffer_size_than(size) + fill_trace_to_chunks(sample_trace_raw) + end + + it 'reads a trace' do + is_expected.to eq(sample_trace_raw) + end + end + end + + context 'when read only first 100 bytes' do + let(:length) { 100 } + + context 'when BUFFER_SIZE is smaller than file size', :clean_gitlab_redis_cache do + before do + set_smaller_buffer_size_than(size) + fill_trace_to_chunks(sample_trace_raw) + end + + it 'reads a trace' do + is_expected.to eq(sample_trace_raw[0, length]) + end + end + + context 'when BUFFER_SIZE is larger than file size', :clean_gitlab_redis_cache do + before do + set_larger_buffer_size_than(size) + fill_trace_to_chunks(sample_trace_raw) + end + + it 'reads a trace' do + is_expected.to eq(sample_trace_raw[0, length]) + end + end + end + + context 'when tries to read oversize' do + let(:length) { size + 1000 } + + context 'when BUFFER_SIZE is smaller than file size' do + before do + set_smaller_buffer_size_than(size) + fill_trace_to_chunks(sample_trace_raw) + end + + it 'reads a trace' do + is_expected.to eq(sample_trace_raw) + end + end + + context 'when BUFFER_SIZE is larger than file size' do + before do + set_larger_buffer_size_than(size) + fill_trace_to_chunks(sample_trace_raw) + end + + it 'reads a trace' do + is_expected.to eq(sample_trace_raw) + end + end + end + + context 'when tries to read 0 bytes' do + let(:length) { 0 } + + context 'when BUFFER_SIZE is smaller than file size' do + before do + set_smaller_buffer_size_than(size) + fill_trace_to_chunks(sample_trace_raw) + end + + it 'reads a trace' do + is_expected.to be_empty + end + end + + context 'when BUFFER_SIZE is larger than file size' do + before do + set_larger_buffer_size_than(size) + fill_trace_to_chunks(sample_trace_raw) + end + + it 'reads a trace' do + is_expected.to be_empty + end + end + end + + context 'when chunk store failed to get chunk' do + let(:length) { nil } + + 
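+      # The helper stubs ChunkStore::Redis#get to return nil, which makes
+      # ChunkedIO#get_chunk raise FailedToGetChunkError on the next read.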
before do + fill_trace_to_chunks(sample_trace_raw) + + stub_chunk_store_redis_get_failed + end + + it 'reads a trace' do + expect { subject }.to raise_error(Gitlab::Ci::Trace::ChunkedFile::ChunkedIO::FailedToGetChunkError) + end + end + end + + describe '#readline' do + subject { chunked_io.readline } + + let(:string_io) { StringIO.new(sample_trace_raw) } + + shared_examples 'all line matching' do + it 'reads a line' do + (0...sample_trace_raw.lines.count).each do + expect(chunked_io.readline).to eq(string_io.readline) + end + end + end + + context 'when chunk store failed to get chunk' do + let(:length) { nil } + + before do + fill_trace_to_chunks(sample_trace_raw) + stub_chunk_store_redis_get_failed + end + + it 'reads a trace' do + expect { subject }.to raise_error(Gitlab::Ci::Trace::ChunkedFile::ChunkedIO::FailedToGetChunkError) + end + end + + context 'when BUFFER_SIZE is smaller than file size' do + before do + set_smaller_buffer_size_than(size) + fill_trace_to_chunks(sample_trace_raw) + end + + it_behaves_like 'all line matching' + end + + context 'when BUFFER_SIZE is larger than file size' do + before do + set_larger_buffer_size_than(size) + fill_trace_to_chunks(sample_trace_raw) + end + + it_behaves_like 'all line matching' + end + + context 'when pos is at middle of the file' do + before do + set_smaller_buffer_size_than(size) + fill_trace_to_chunks(sample_trace_raw) + + chunked_io.seek(size / 2) + string_io.seek(size / 2) + end + + it 'reads from pos' do + expect(chunked_io.readline).to eq(string_io.readline) + end + end + end + + describe '#write' do + subject { chunked_io.write(data) } + + let(:data) { sample_trace_raw } + + context 'when write mdoe' do + let(:mode) { 'wb' } + + context 'when BUFFER_SIZE is smaller than file size', :clean_gitlab_redis_cache do + before do + set_smaller_buffer_size_than(size) + end + + it 'writes a trace' do + is_expected.to eq(data.length) + + Gitlab::Ci::Trace::ChunkedFile::ChunkedIO.open(job_id, size, 'rb') do |stream| + expect(stream.read).to eq(data) + expect(chunk_store.chunks_count(job_id)).to eq(stream.send(:chunks_count)) + expect(chunk_store.chunks_size(job_id)).to eq(data.length) + end + end + end + + context 'when BUFFER_SIZE is larger than file size', :clean_gitlab_redis_cache do + before do + set_larger_buffer_size_than(size) + end + + it 'writes a trace' do + is_expected.to eq(data.length) + + Gitlab::Ci::Trace::ChunkedFile::ChunkedIO.open(job_id, size, 'rb') do |stream| + expect(stream.read).to eq(data) + expect(chunk_store.chunks_count(job_id)).to eq(stream.send(:chunks_count)) + expect(chunk_store.chunks_size(job_id)).to eq(data.length) + end + end + end + + context 'when data is nil' do + let(:data) { nil } + + it 'writes a trace' do + expect { subject } .to raise_error('Could not write empty data') + end + end + end + + context 'when append mdoe' do + let(:original_data) { 'original data' } + let(:total_size) { original_data.length + data.length } + + context 'when BUFFER_SIZE is smaller than file size', :clean_gitlab_redis_cache do + before do + set_smaller_buffer_size_than(size) + fill_trace_to_chunks(original_data) + end + + it 'appends a trace' do + described_class.open(job_id, original_data.length, 'a+b') do |stream| + expect(stream.write(data)).to eq(data.length) + end + + described_class.open(job_id, total_size, 'rb') do |stream| + expect(stream.read).to eq(original_data + data) + expect(chunk_store.chunks_count(job_id)).to eq(stream.send(:chunks_count)) + expect(chunk_store.chunks_size(job_id)).to eq(total_size) + 
end + end + end + + context 'when BUFFER_SIZE is larger than file size', :clean_gitlab_redis_cache do + before do + set_larger_buffer_size_than(size) + fill_trace_to_chunks(original_data) + end + + it 'appends a trace' do + described_class.open(job_id, original_data.length, 'a+b') do |stream| + expect(stream.write(data)).to eq(data.length) + end + + described_class.open(job_id, total_size, 'rb') do |stream| + expect(stream.read).to eq(original_data + data) + expect(chunk_store.chunks_count(job_id)).to eq(stream.send(:chunks_count)) + expect(chunk_store.chunks_size(job_id)).to eq(total_size) + end + end + end + end + end + + describe '#truncate' do + context 'when data exists' do + context 'when BUFFER_SIZE is smaller than file size', :clean_gitlab_redis_cache do + before do + set_smaller_buffer_size_than(size) + fill_trace_to_chunks(sample_trace_raw) + end + + it 'truncates a trace' do + described_class.open(job_id, size, 'rb') do |stream| + expect(stream.read).to eq(sample_trace_raw) + end + + described_class.open(job_id, size, 'wb') do |stream| + stream.truncate(0) + end + + described_class.open(job_id, 0, 'rb') do |stream| + expect(stream.read).to be_empty + end + + expect(chunk_store.chunks_count(job_id)).to eq(0) + expect(chunk_store.chunks_size(job_id)).to eq(0) + end + + context 'when offset is negative', :clean_gitlab_redis_cache do + it 'raises an error' do + described_class.open(job_id, size, 'wb') do |stream| + expect { stream.truncate(-1) }.to raise_error('Offset is out of bound') + end + end + end + + context 'when offset is larger than file size', :clean_gitlab_redis_cache do + it 'raises an error' do + described_class.open(job_id, size, 'wb') do |stream| + expect { stream.truncate(size + 1) }.to raise_error('Offset is out of bound') + end + end + end + end + + context 'when BUFFER_SIZE is larger than file size', :clean_gitlab_redis_cache do + before do + set_larger_buffer_size_than(size) + fill_trace_to_chunks(sample_trace_raw) + end + + it 'truncates a trace' do + described_class.open(job_id, size, 'rb') do |stream| + expect(stream.read).to eq(sample_trace_raw) + end + + described_class.open(job_id, size, 'wb') do |stream| + stream.truncate(0) + end + + described_class.open(job_id, 0, 'rb') do |stream| + expect(stream.read).to be_empty + end + + expect(chunk_store.chunks_count(job_id)).to eq(0) + expect(chunk_store.chunks_size(job_id)).to eq(0) + end + end + end + + context 'when data does not exist' do + before do + set_smaller_buffer_size_than(size) + end + + it 'truncates a trace' do + described_class.open(job_id, size, 'wb') do |stream| + stream.truncate(0) + expect(stream.send(:tell)).to eq(0) + expect(stream.send(:size)).to eq(0) + end + end + end + end +end diff --git a/spec/support/chunked_io/chunked_io_helpers.rb b/spec/support/chunked_io/chunked_io_helpers.rb new file mode 100644 index 000000000000..d87483620e5b --- /dev/null +++ b/spec/support/chunked_io/chunked_io_helpers.rb @@ -0,0 +1,32 @@ +module ChunkedIOHelpers + def fill_trace_to_chunks(data) + stream = Gitlab::Ci::Trace::ChunkedFile::ChunkedIO.new(job_id, data.length, 'wb') + stream.write(data) + stream.close + end + + def sample_trace_raw + @sample_trace_raw ||= File.read(expand_fixture_path('trace/sample_trace')) + end + + def sample_trace_size + sample_trace_raw.length + end + + def stub_chunk_store_redis_get_failed + allow_any_instance_of(Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis) + .to receive(:get).and_return(nil) + end + + def set_smaller_buffer_size_than(file_size) + blocks = (file_size / 
128) + new_size = (blocks / 2) * 128 + stub_const("Gitlab::Ci::Trace::ChunkedFile::ChunkedIO::BUFFER_SIZE", new_size) + end + + def set_larger_buffer_size_than(file_size) + blocks = (file_size / 128) + new_size = (blocks * 2) * 128 + stub_const("Gitlab::Ci::Trace::ChunkedFile::ChunkedIO::BUFFER_SIZE", new_size) + end +end -- GitLab From 8a1c2bc4748dd5b210261905fd84466c25233959 Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Fri, 30 Mar 2018 04:13:09 +0900 Subject: [PATCH 10/86] Add tests for ChunkStore::Database too --- .../ci/trace/chunked_file/chunk_store/base.rb | 4 + .../ci/trace/chunked_file/chunked_io.rb | 9 +- .../ci/trace/chunked_file/chunked_io_spec.rb | 474 +----------------- spec/support/chunked_io/chunked_io_helpers.rb | 11 +- .../chunked_io_shared_examples.rb | 468 +++++++++++++++++ 5 files changed, 499 insertions(+), 467 deletions(-) create mode 100644 spec/support/shared_examples/lib/gitlab/ci/trace/chunked_file/chunked_io_shared_examples.rb diff --git a/lib/gitlab/ci/trace/chunked_file/chunk_store/base.rb b/lib/gitlab/ci/trace/chunked_file/chunk_store/base.rb index 890cf26cba2f..d1d78ce010a5 100644 --- a/lib/gitlab/ci/trace/chunked_file/chunk_store/base.rb +++ b/lib/gitlab/ci/trace/chunked_file/chunk_store/base.rb @@ -30,6 +30,10 @@ def write!(data) raise NotImplementedError end + def append!(data) + raise NotImplementedError + end + def truncate!(offset) raise NotImplementedError end diff --git a/lib/gitlab/ci/trace/chunked_file/chunked_io.rb b/lib/gitlab/ci/trace/chunked_file/chunked_io.rb index c3a84083eaeb..e4add1a53e81 100644 --- a/lib/gitlab/ci/trace/chunked_file/chunked_io.rb +++ b/lib/gitlab/ci/trace/chunked_file/chunked_io.rb @@ -15,6 +15,7 @@ def open(job_id, size, mode) end end + BUFFER_SIZE = 128.kilobytes WriteError = Class.new(StandardError) FailedToGetChunkError = Class.new(StandardError) @@ -134,6 +135,8 @@ def write(data, &block) writable_space = BUFFER_SIZE - chunk_offset writing_size = [writable_space, data.length].min + break unless writing_size > 0 + if store.size > 0 written_size = store.append!(data.slice!(0...writing_size)) else @@ -228,7 +231,11 @@ def chunk_index end def chunks_count - (size / BUFFER_SIZE) + 1 + (size / BUFFER_SIZE) + (has_extra? ? 1 : 0) + end + + def has_extra? + (size % BUFFER_SIZE) > 0 end def last_chunk? 
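           # Usage sketch (hypothetical caller; `open`, `write` and the chunk
           # arithmetic above are real, but a concrete subclass must supply
           # chunk_store before this can run):
           #
           #   Gitlab::Ci::Trace::ChunkedFile::ChunkedIO.open(job_id, size, 'a+b') do |io|
           #     io.write('trace data')  # sliced into BUFFER_SIZE chunks
           #   end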
diff --git a/spec/lib/gitlab/ci/trace/chunked_file/chunked_io_spec.rb b/spec/lib/gitlab/ci/trace/chunked_file/chunked_io_spec.rb index 506ce71099af..6dec1b319bb6 100644 --- a/spec/lib/gitlab/ci/trace/chunked_file/chunked_io_spec.rb +++ b/spec/lib/gitlab/ci/trace/chunked_file/chunked_io_spec.rb @@ -4,478 +4,28 @@ include ChunkedIOHelpers let(:chunked_io) { described_class.new(job_id, size, mode) } - let(:job_id) { 1 } + let(:job) { create(:ci_build) } + let(:job_id) { job.id } let(:size) { sample_trace_size } let(:mode) { 'rb' } - let(:buffer_size) { 128.kilobytes } - let(:chunk_store) { Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis } - before do - allow_any_instance_of(described_class).to receive(:chunk_store).and_return(chunk_store) - stub_const("Gitlab::Ci::Trace::ChunkedFile::ChunkedIO::BUFFER_SIZE", buffer_size) - end - - describe '#new' do - context 'when mode is read' do - let(:mode) { 'rb' } - - it 'raises no exception' do - described_class.new(job_id, size, mode) - - expect { described_class.new(job_id, size, mode) }.not_to raise_error - end - end - - context 'when mode is write' do - let(:mode) { 'a+b' } - - it 'raises an exception' do - described_class.new(job_id, size, mode) - - expect { described_class.new(job_id, size, mode) }.to raise_error('Already opened by another process') - end - - context 'when closed after open' do - it 'does not raise an exception' do - described_class.new(job_id, size, mode).close - - expect { described_class.new(job_id, size, mode) }.not_to raise_error - end - end - end - end - - describe '#seek' do - subject { chunked_io.seek(pos, where) } - - context 'when moves pos to end of the file' do - let(:pos) { 0 } - let(:where) { IO::SEEK_END } - - it { is_expected.to eq(size) } - end - - context 'when moves pos to middle of the file' do - let(:pos) { size / 2 } - let(:where) { IO::SEEK_SET } - - it { is_expected.to eq(size / 2) } - end - - context 'when moves pos around' do - it 'matches the result' do - expect(chunked_io.seek(0)).to eq(0) - expect(chunked_io.seek(100, IO::SEEK_CUR)).to eq(100) - expect { chunked_io.seek(size + 1, IO::SEEK_CUR) }.to raise_error('new position is outside of file') - end - end - end - - describe '#eof?' do - subject { chunked_io.eof? 
} - - context 'when current pos is at end of the file' do - before do - chunked_io.seek(size, IO::SEEK_SET) - end - - it { is_expected.to be_truthy } - end - - context 'when current pos is not at end of the file' do - before do - chunked_io.seek(0, IO::SEEK_SET) - end - - it { is_expected.to be_falsey } - end - end + describe 'ChunkStore is Redis', :partial_support do + let(:chunk_store) { Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis } - describe '#each_line' do - let(:buffer_size) { 128.kilobytes } - let(:string_io) { StringIO.new(sample_trace_raw) } - - context 'when BUFFER_SIZE is smaller than file size' do - before do - set_smaller_buffer_size_than(size) - fill_trace_to_chunks(sample_trace_raw) - end - - it 'yields lines' do - expect { |b| described_class.new(job_id, size, 'rb').each_line(&b) } - .to yield_successive_args(*string_io.each_line.to_a) - end + before do + allow_any_instance_of(described_class).to receive(:chunk_store).and_return(chunk_store) end - context 'when BUFFER_SIZE is larger than file size' do - let(:buffer_size) { size + 1000 } - - before do - set_larger_buffer_size_than(size) - fill_trace_to_chunks(sample_trace_raw) - end - - it 'calls get_chunk only once' do - expect(chunk_store).to receive(:open).once.and_call_original - - described_class.new(job_id, size, 'rb').each_line { |line| } - end - end + it_behaves_like 'ChunkedIO shared tests' end - describe '#read' do - subject { described_class.new(job_id, size, 'rb').read(length) } - - context 'when read whole size' do - let(:length) { nil } - - context 'when BUFFER_SIZE is smaller than file size', :clean_gitlab_redis_cache do - before do - set_smaller_buffer_size_than(size) - fill_trace_to_chunks(sample_trace_raw) - end - - it 'reads a trace' do - is_expected.to eq(sample_trace_raw) - end - end - - context 'when BUFFER_SIZE is larger than file size', :clean_gitlab_redis_cache do - before do - set_larger_buffer_size_than(size) - fill_trace_to_chunks(sample_trace_raw) - end - - it 'reads a trace' do - is_expected.to eq(sample_trace_raw) - end - end - end - - context 'when read only first 100 bytes' do - let(:length) { 100 } - - context 'when BUFFER_SIZE is smaller than file size', :clean_gitlab_redis_cache do - before do - set_smaller_buffer_size_than(size) - fill_trace_to_chunks(sample_trace_raw) - end - - it 'reads a trace' do - is_expected.to eq(sample_trace_raw[0, length]) - end - end + describe 'ChunkStore is Database' do + let(:chunk_store) { Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Database } - context 'when BUFFER_SIZE is larger than file size', :clean_gitlab_redis_cache do - before do - set_larger_buffer_size_than(size) - fill_trace_to_chunks(sample_trace_raw) - end - - it 'reads a trace' do - is_expected.to eq(sample_trace_raw[0, length]) - end - end - end - - context 'when tries to read oversize' do - let(:length) { size + 1000 } - - context 'when BUFFER_SIZE is smaller than file size' do - before do - set_smaller_buffer_size_than(size) - fill_trace_to_chunks(sample_trace_raw) - end - - it 'reads a trace' do - is_expected.to eq(sample_trace_raw) - end - end - - context 'when BUFFER_SIZE is larger than file size' do - before do - set_larger_buffer_size_than(size) - fill_trace_to_chunks(sample_trace_raw) - end - - it 'reads a trace' do - is_expected.to eq(sample_trace_raw) - end - end + before do + allow_any_instance_of(described_class).to receive(:chunk_store).and_return(chunk_store) end - context 'when tries to read 0 bytes' do - let(:length) { 0 } - - context 'when BUFFER_SIZE is smaller than 
file size' do - before do - set_smaller_buffer_size_than(size) - fill_trace_to_chunks(sample_trace_raw) - end - - it 'reads a trace' do - is_expected.to be_empty - end - end - - context 'when BUFFER_SIZE is larger than file size' do - before do - set_larger_buffer_size_than(size) - fill_trace_to_chunks(sample_trace_raw) - end - - it 'reads a trace' do - is_expected.to be_empty - end - end - end - - context 'when chunk store failed to get chunk' do - let(:length) { nil } - - before do - fill_trace_to_chunks(sample_trace_raw) - - stub_chunk_store_redis_get_failed - end - - it 'reads a trace' do - expect { subject }.to raise_error(Gitlab::Ci::Trace::ChunkedFile::ChunkedIO::FailedToGetChunkError) - end - end - end - - describe '#readline' do - subject { chunked_io.readline } - - let(:string_io) { StringIO.new(sample_trace_raw) } - - shared_examples 'all line matching' do - it 'reads a line' do - (0...sample_trace_raw.lines.count).each do - expect(chunked_io.readline).to eq(string_io.readline) - end - end - end - - context 'when chunk store failed to get chunk' do - let(:length) { nil } - - before do - fill_trace_to_chunks(sample_trace_raw) - stub_chunk_store_redis_get_failed - end - - it 'reads a trace' do - expect { subject }.to raise_error(Gitlab::Ci::Trace::ChunkedFile::ChunkedIO::FailedToGetChunkError) - end - end - - context 'when BUFFER_SIZE is smaller than file size' do - before do - set_smaller_buffer_size_than(size) - fill_trace_to_chunks(sample_trace_raw) - end - - it_behaves_like 'all line matching' - end - - context 'when BUFFER_SIZE is larger than file size' do - before do - set_larger_buffer_size_than(size) - fill_trace_to_chunks(sample_trace_raw) - end - - it_behaves_like 'all line matching' - end - - context 'when pos is at middle of the file' do - before do - set_smaller_buffer_size_than(size) - fill_trace_to_chunks(sample_trace_raw) - - chunked_io.seek(size / 2) - string_io.seek(size / 2) - end - - it 'reads from pos' do - expect(chunked_io.readline).to eq(string_io.readline) - end - end - end - - describe '#write' do - subject { chunked_io.write(data) } - - let(:data) { sample_trace_raw } - - context 'when write mdoe' do - let(:mode) { 'wb' } - - context 'when BUFFER_SIZE is smaller than file size', :clean_gitlab_redis_cache do - before do - set_smaller_buffer_size_than(size) - end - - it 'writes a trace' do - is_expected.to eq(data.length) - - Gitlab::Ci::Trace::ChunkedFile::ChunkedIO.open(job_id, size, 'rb') do |stream| - expect(stream.read).to eq(data) - expect(chunk_store.chunks_count(job_id)).to eq(stream.send(:chunks_count)) - expect(chunk_store.chunks_size(job_id)).to eq(data.length) - end - end - end - - context 'when BUFFER_SIZE is larger than file size', :clean_gitlab_redis_cache do - before do - set_larger_buffer_size_than(size) - end - - it 'writes a trace' do - is_expected.to eq(data.length) - - Gitlab::Ci::Trace::ChunkedFile::ChunkedIO.open(job_id, size, 'rb') do |stream| - expect(stream.read).to eq(data) - expect(chunk_store.chunks_count(job_id)).to eq(stream.send(:chunks_count)) - expect(chunk_store.chunks_size(job_id)).to eq(data.length) - end - end - end - - context 'when data is nil' do - let(:data) { nil } - - it 'writes a trace' do - expect { subject } .to raise_error('Could not write empty data') - end - end - end - - context 'when append mdoe' do - let(:original_data) { 'original data' } - let(:total_size) { original_data.length + data.length } - - context 'when BUFFER_SIZE is smaller than file size', :clean_gitlab_redis_cache do - before do - 
set_smaller_buffer_size_than(size) - fill_trace_to_chunks(original_data) - end - - it 'appends a trace' do - described_class.open(job_id, original_data.length, 'a+b') do |stream| - expect(stream.write(data)).to eq(data.length) - end - - described_class.open(job_id, total_size, 'rb') do |stream| - expect(stream.read).to eq(original_data + data) - expect(chunk_store.chunks_count(job_id)).to eq(stream.send(:chunks_count)) - expect(chunk_store.chunks_size(job_id)).to eq(total_size) - end - end - end - - context 'when BUFFER_SIZE is larger than file size', :clean_gitlab_redis_cache do - before do - set_larger_buffer_size_than(size) - fill_trace_to_chunks(original_data) - end - - it 'appends a trace' do - described_class.open(job_id, original_data.length, 'a+b') do |stream| - expect(stream.write(data)).to eq(data.length) - end - - described_class.open(job_id, total_size, 'rb') do |stream| - expect(stream.read).to eq(original_data + data) - expect(chunk_store.chunks_count(job_id)).to eq(stream.send(:chunks_count)) - expect(chunk_store.chunks_size(job_id)).to eq(total_size) - end - end - end - end - end - - describe '#truncate' do - context 'when data exists' do - context 'when BUFFER_SIZE is smaller than file size', :clean_gitlab_redis_cache do - before do - set_smaller_buffer_size_than(size) - fill_trace_to_chunks(sample_trace_raw) - end - - it 'truncates a trace' do - described_class.open(job_id, size, 'rb') do |stream| - expect(stream.read).to eq(sample_trace_raw) - end - - described_class.open(job_id, size, 'wb') do |stream| - stream.truncate(0) - end - - described_class.open(job_id, 0, 'rb') do |stream| - expect(stream.read).to be_empty - end - - expect(chunk_store.chunks_count(job_id)).to eq(0) - expect(chunk_store.chunks_size(job_id)).to eq(0) - end - - context 'when offset is negative', :clean_gitlab_redis_cache do - it 'raises an error' do - described_class.open(job_id, size, 'wb') do |stream| - expect { stream.truncate(-1) }.to raise_error('Offset is out of bound') - end - end - end - - context 'when offset is larger than file size', :clean_gitlab_redis_cache do - it 'raises an error' do - described_class.open(job_id, size, 'wb') do |stream| - expect { stream.truncate(size + 1) }.to raise_error('Offset is out of bound') - end - end - end - end - - context 'when BUFFER_SIZE is larger than file size', :clean_gitlab_redis_cache do - before do - set_larger_buffer_size_than(size) - fill_trace_to_chunks(sample_trace_raw) - end - - it 'truncates a trace' do - described_class.open(job_id, size, 'rb') do |stream| - expect(stream.read).to eq(sample_trace_raw) - end - - described_class.open(job_id, size, 'wb') do |stream| - stream.truncate(0) - end - - described_class.open(job_id, 0, 'rb') do |stream| - expect(stream.read).to be_empty - end - - expect(chunk_store.chunks_count(job_id)).to eq(0) - expect(chunk_store.chunks_size(job_id)).to eq(0) - end - end - end - - context 'when data does not exist' do - before do - set_smaller_buffer_size_than(size) - end - - it 'truncates a trace' do - described_class.open(job_id, size, 'wb') do |stream| - stream.truncate(0) - expect(stream.send(:tell)).to eq(0) - expect(stream.send(:size)).to eq(0) - end - end - end + it_behaves_like 'ChunkedIO shared tests' end end diff --git a/spec/support/chunked_io/chunked_io_helpers.rb b/spec/support/chunked_io/chunked_io_helpers.rb index d87483620e5b..d35968e460c9 100644 --- a/spec/support/chunked_io/chunked_io_helpers.rb +++ b/spec/support/chunked_io/chunked_io_helpers.rb @@ -6,16 +6,19 @@ def fill_trace_to_chunks(data) 
end def sample_trace_raw - @sample_trace_raw ||= File.read(expand_fixture_path('trace/sample_trace')) + if chunk_store == Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis + File.read(expand_fixture_path('trace/sample_trace')) + else + '01234567' * 32 + end end def sample_trace_size sample_trace_raw.length end - def stub_chunk_store_redis_get_failed - allow_any_instance_of(Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis) - .to receive(:get).and_return(nil) + def stub_chunk_store_get_failed + allow_any_instance_of(chunk_store).to receive(:get).and_return(nil) end def set_smaller_buffer_size_than(file_size) diff --git a/spec/support/shared_examples/lib/gitlab/ci/trace/chunked_file/chunked_io_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/ci/trace/chunked_file/chunked_io_shared_examples.rb new file mode 100644 index 000000000000..b1c9eabf75f4 --- /dev/null +++ b/spec/support/shared_examples/lib/gitlab/ci/trace/chunked_file/chunked_io_shared_examples.rb @@ -0,0 +1,468 @@ +shared_examples "ChunkedIO shared tests" do + around(:each, :partial_support) do |example| + example.run if chunk_store == Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis + end + + describe '#new' do + context 'when mode is read' do + let(:mode) { 'rb' } + + it 'raises no exception' do + described_class.new(job_id, size, mode) + + expect { described_class.new(job_id, size, mode) }.not_to raise_error + end + end + + context 'when mode is write' do + let(:mode) { 'a+b' } + + it 'raises an exception' do + described_class.new(job_id, size, mode) + + expect { described_class.new(job_id, size, mode) }.to raise_error('Already opened by another process') + end + + context 'when closed after open' do + it 'does not raise an exception' do + described_class.new(job_id, size, mode).close + + expect { described_class.new(job_id, size, mode) }.not_to raise_error + end + end + end + end + + describe '#seek' do + subject { chunked_io.seek(pos, where) } + + context 'when moves pos to end of the file' do + let(:pos) { 0 } + let(:where) { IO::SEEK_END } + + it { is_expected.to eq(size) } + end + + context 'when moves pos to middle of the file' do + let(:pos) { size / 2 } + let(:where) { IO::SEEK_SET } + + it { is_expected.to eq(size / 2) } + end + + context 'when moves pos around' do + it 'matches the result' do + expect(chunked_io.seek(0)).to eq(0) + expect(chunked_io.seek(100, IO::SEEK_CUR)).to eq(100) + expect { chunked_io.seek(size + 1, IO::SEEK_CUR) }.to raise_error('new position is outside of file') + end + end + end + + describe '#eof?' do + subject { chunked_io.eof? 
} + + context 'when current pos is at end of the file' do + before do + chunked_io.seek(size, IO::SEEK_SET) + end + + it { is_expected.to be_truthy } + end + + context 'when current pos is not at end of the file' do + before do + chunked_io.seek(0, IO::SEEK_SET) + end + + it { is_expected.to be_falsey } + end + end + + describe '#each_line' do + let(:string_io) { StringIO.new(sample_trace_raw) } + + context 'when BUFFER_SIZE is smaller than file size' do + before do + set_smaller_buffer_size_than(size) + fill_trace_to_chunks(sample_trace_raw) + end + + it 'yields lines' do + expect { |b| described_class.new(job_id, size, 'rb').each_line(&b) } + .to yield_successive_args(*string_io.each_line.to_a) + end + end + + context 'when BUFFER_SIZE is larger than file size', :partial_support do + before do + set_larger_buffer_size_than(size) + fill_trace_to_chunks(sample_trace_raw) + end + + it 'calls get_chunk only once' do + expect(chunk_store).to receive(:open).once.and_call_original + + described_class.new(job_id, size, 'rb').each_line { |line| } + end + end + end + + describe '#read' do + subject { described_class.new(job_id, size, 'rb').read(length) } + + context 'when read whole size' do + let(:length) { nil } + + context 'when BUFFER_SIZE is smaller than file size' do + before do + set_smaller_buffer_size_than(size) + fill_trace_to_chunks(sample_trace_raw) + end + + it 'reads a trace' do + is_expected.to eq(sample_trace_raw) + end + end + + context 'when BUFFER_SIZE is larger than file size', :partial_support do + before do + set_larger_buffer_size_than(size) + fill_trace_to_chunks(sample_trace_raw) + end + + it 'reads a trace' do + is_expected.to eq(sample_trace_raw) + end + end + end + + context 'when read only first 100 bytes' do + let(:length) { 100 } + + context 'when BUFFER_SIZE is smaller than file size' do + before do + set_smaller_buffer_size_than(size) + fill_trace_to_chunks(sample_trace_raw) + end + + it 'reads a trace' do + is_expected.to eq(sample_trace_raw[0, length]) + end + end + + context 'when BUFFER_SIZE is larger than file size', :partial_support do + before do + set_larger_buffer_size_than(size) + fill_trace_to_chunks(sample_trace_raw) + end + + it 'reads a trace' do + is_expected.to eq(sample_trace_raw[0, length]) + end + end + end + + context 'when tries to read oversize' do + let(:length) { size + 1000 } + + context 'when BUFFER_SIZE is smaller than file size' do + before do + set_smaller_buffer_size_than(size) + fill_trace_to_chunks(sample_trace_raw) + end + + it 'reads a trace' do + is_expected.to eq(sample_trace_raw) + end + end + + context 'when BUFFER_SIZE is larger than file size', :partial_support do + before do + set_larger_buffer_size_than(size) + fill_trace_to_chunks(sample_trace_raw) + end + + it 'reads a trace' do + is_expected.to eq(sample_trace_raw) + end + end + end + + context 'when tries to read 0 bytes' do + let(:length) { 0 } + + context 'when BUFFER_SIZE is smaller than file size' do + before do + set_smaller_buffer_size_than(size) + fill_trace_to_chunks(sample_trace_raw) + end + + it 'reads a trace' do + is_expected.to be_empty + end + end + + context 'when BUFFER_SIZE is larger than file size', :partial_support do + before do + set_larger_buffer_size_than(size) + fill_trace_to_chunks(sample_trace_raw) + end + + it 'reads a trace' do + is_expected.to be_empty + end + end + end + + context 'when chunk store failed to get chunk' do + let(:length) { nil } + + before do + set_smaller_buffer_size_than(size) + fill_trace_to_chunks(sample_trace_raw) + + 
stub_chunk_store_get_failed + end + + it 'reads a trace' do + expect { subject }.to raise_error(Gitlab::Ci::Trace::ChunkedFile::ChunkedIO::FailedToGetChunkError) + end + end + end + + describe '#readline' do + subject { chunked_io.readline } + + let(:string_io) { StringIO.new(sample_trace_raw) } + + shared_examples 'all line matching' do + it 'reads a line' do + (0...sample_trace_raw.lines.count).each do + expect(chunked_io.readline).to eq(string_io.readline) + end + end + end + + context 'when chunk store failed to get chunk' do + let(:length) { nil } + + before do + set_smaller_buffer_size_than(size) + fill_trace_to_chunks(sample_trace_raw) + stub_chunk_store_get_failed + end + + it 'reads a trace' do + expect { subject }.to raise_error(Gitlab::Ci::Trace::ChunkedFile::ChunkedIO::FailedToGetChunkError) + end + end + + context 'when BUFFER_SIZE is smaller than file size' do + before do + set_smaller_buffer_size_than(size) + fill_trace_to_chunks(sample_trace_raw) + end + + it_behaves_like 'all line matching' + end + + context 'when BUFFER_SIZE is larger than file size', :partial_support do + before do + set_larger_buffer_size_than(size) + fill_trace_to_chunks(sample_trace_raw) + end + + it_behaves_like 'all line matching' + end + + context 'when pos is at middle of the file' do + before do + set_smaller_buffer_size_than(size) + fill_trace_to_chunks(sample_trace_raw) + + chunked_io.seek(size / 2) + string_io.seek(size / 2) + end + + it 'reads from pos' do + expect(chunked_io.readline).to eq(string_io.readline) + end + end + end + + describe '#write' do + subject { chunked_io.write(data) } + + let(:data) { sample_trace_raw } + + context 'when write mdoe' do + let(:mode) { 'wb' } + + context 'when BUFFER_SIZE is smaller than file size' do + before do + set_smaller_buffer_size_than(size) + end + + it 'writes a trace' do + is_expected.to eq(data.length) + + Gitlab::Ci::Trace::ChunkedFile::ChunkedIO.open(job_id, size, 'rb') do |stream| + expect(stream.read).to eq(data) + expect(chunk_store.chunks_count(job_id)).to eq(stream.send(:chunks_count)) + expect(chunk_store.chunks_size(job_id)).to eq(data.length) + end + end + end + + context 'when BUFFER_SIZE is larger than file size', :partial_support do + before do + set_larger_buffer_size_than(size) + end + + it 'writes a trace' do + is_expected.to eq(data.length) + + Gitlab::Ci::Trace::ChunkedFile::ChunkedIO.open(job_id, size, 'rb') do |stream| + expect(stream.read).to eq(data) + expect(chunk_store.chunks_count(job_id)).to eq(stream.send(:chunks_count)) + expect(chunk_store.chunks_size(job_id)).to eq(data.length) + end + end + end + + context 'when data is nil' do + let(:data) { nil } + + it 'writes a trace' do + expect { subject } .to raise_error('Could not write empty data') + end + end + end + + context 'when append mdoe', :partial_support do + let(:original_data) { 'original data' } + let(:total_size) { original_data.length + data.length } + + context 'when BUFFER_SIZE is smaller than file size' do + before do + set_smaller_buffer_size_than(size) + fill_trace_to_chunks(original_data) + end + + it 'appends a trace' do + described_class.open(job_id, original_data.length, 'a+b') do |stream| + expect(stream.write(data)).to eq(data.length) + end + + described_class.open(job_id, total_size, 'rb') do |stream| + expect(stream.read).to eq(original_data + data) + expect(chunk_store.chunks_count(job_id)).to eq(stream.send(:chunks_count)) + expect(chunk_store.chunks_size(job_id)).to eq(total_size) + end + end + end + + context 'when BUFFER_SIZE is larger 
than file size' do + before do + set_larger_buffer_size_than(size) + fill_trace_to_chunks(original_data) + end + + it 'appends a trace' do + described_class.open(job_id, original_data.length, 'a+b') do |stream| + expect(stream.write(data)).to eq(data.length) + end + + described_class.open(job_id, total_size, 'rb') do |stream| + expect(stream.read).to eq(original_data + data) + expect(chunk_store.chunks_count(job_id)).to eq(stream.send(:chunks_count)) + expect(chunk_store.chunks_size(job_id)).to eq(total_size) + end + end + end + end + end + + describe '#truncate' do + context 'when data exists' do + context 'when BUFFER_SIZE is smaller than file size' do + before do + set_smaller_buffer_size_than(size) + fill_trace_to_chunks(sample_trace_raw) + end + + it 'truncates a trace' do + described_class.open(job_id, size, 'rb') do |stream| + expect(stream.read).to eq(sample_trace_raw) + end + + described_class.open(job_id, size, 'wb') do |stream| + stream.truncate(0) + end + + described_class.open(job_id, 0, 'rb') do |stream| + expect(stream.read).to be_empty + end + + expect(chunk_store.chunks_count(job_id)).to eq(0) + expect(chunk_store.chunks_size(job_id)).to eq(0) + end + + context 'when offset is negative' do + it 'raises an error' do + described_class.open(job_id, size, 'wb') do |stream| + expect { stream.truncate(-1) }.to raise_error('Offset is out of bound') + end + end + end + + context 'when offset is larger than file size' do + it 'raises an error' do + described_class.open(job_id, size, 'wb') do |stream| + expect { stream.truncate(size + 1) }.to raise_error('Offset is out of bound') + end + end + end + end + + context 'when BUFFER_SIZE is larger than file size', :partial_support do + before do + set_larger_buffer_size_than(size) + fill_trace_to_chunks(sample_trace_raw) + end + + it 'truncates a trace' do + described_class.open(job_id, size, 'rb') do |stream| + expect(stream.read).to eq(sample_trace_raw) + end + + described_class.open(job_id, size, 'wb') do |stream| + stream.truncate(0) + end + + described_class.open(job_id, 0, 'rb') do |stream| + expect(stream.read).to be_empty + end + + expect(chunk_store.chunks_count(job_id)).to eq(0) + expect(chunk_store.chunks_size(job_id)).to eq(0) + end + end + end + + context 'when data does not exist' do + before do + set_smaller_buffer_size_than(size) + end + + it 'truncates a trace' do + described_class.open(job_id, size, 'wb') do |stream| + stream.truncate(0) + expect(stream.send(:tell)).to eq(0) + expect(stream.send(:size)).to eq(0) + end + end + end + end +end -- GitLab From d1632da8c30b69ff915e78a86a661282b8ef24e6 Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Sun, 1 Apr 2018 13:34:29 +0900 Subject: [PATCH 11/86] Implement basic live trace feature --- lib/gitlab/ci/trace.rb | 10 +- .../ci/trace/chunked_file/chunk_store/base.rb | 10 +- .../chunked_file/chunk_store/database.rb | 8 +- .../trace/chunked_file/chunk_store/redis.rb | 7 +- .../ci/trace/chunked_file/chunked_io.rb | 121 ++++++----- .../chunked_file/{remote.rb => http_io.rb} | 12 +- .../ci/trace/chunked_file/live_trace.rb | 49 +++-- .../ci/trace/chunked_file/chunked_io_spec.rb | 2 + .../ci/trace/chunked_file/live_trace_spec.rb | 201 ++++++++++++++++++ spec/support/chunked_io/chunked_io_helpers.rb | 20 +- spec/support/chunked_io/live_trace_helpers.rb | 32 +++ .../chunked_io_shared_examples.rb | 48 ++--- 12 files changed, 394 insertions(+), 126 deletions(-) rename lib/gitlab/ci/trace/chunked_file/{remote.rb => http_io.rb} (77%) create mode 100644 
spec/lib/gitlab/ci/trace/chunked_file/live_trace_spec.rb create mode 100644 spec/support/chunked_io/live_trace_helpers.rb diff --git a/lib/gitlab/ci/trace.rb b/lib/gitlab/ci/trace.rb index 49f31352a659..9db65961a520 100644 --- a/lib/gitlab/ci/trace.rb +++ b/lib/gitlab/ci/trace.rb @@ -61,8 +61,8 @@ def read stream = Gitlab::Ci::Trace::Stream.new do if trace_artifact trace_artifact.open - elsif Feature.enabled?('ci_enable_live_trace') && LiveTraceFile.exists?(job.id) - LiveTraceFile.new(job.id, "rb") + elsif Feature.enabled?('ci_enable_live_trace') && ChunkedFile::LiveTrace.exists?(job.id) + ChunkedFile::LiveTrace.new(job.id, "rb") elsif current_path File.open(current_path, "rb") elsif old_trace @@ -81,7 +81,7 @@ def write if current_path current_path else - LiveTraceFile.new(job.id, "a+b") + ChunkedFile::LiveTrace.new(job.id, "a+b") end else File.open(ensure_path, "a+b") @@ -109,8 +109,8 @@ def archive! raise ArchiveError, 'Already archived' if trace_artifact raise ArchiveError, 'Job is not finished yet' unless job.complete? - if Feature.enabled?('ci_enable_live_trace') && LiveTraceFile.exists?(job.id) - LiveTraceFile.open(job.id, "wb") do |stream| + if Feature.enabled?('ci_enable_live_trace') && ChunkedFile::LiveTrace.exists?(job.id) + ChunkedFile::LiveTrace.open(job.id, "wb") do |stream| archive_stream!(stream) stream.truncate(0) end diff --git a/lib/gitlab/ci/trace/chunked_file/chunk_store/base.rb b/lib/gitlab/ci/trace/chunked_file/chunk_store/base.rb index d1d78ce010a5..e2645918a40c 100644 --- a/lib/gitlab/ci/trace/chunked_file/chunk_store/base.rb +++ b/lib/gitlab/ci/trace/chunked_file/chunk_store/base.rb @@ -4,14 +4,10 @@ class Trace module ChunkedFile module ChunkStore class Base - attr_reader :buffer_size - attr_reader :chunk_start - attr_reader :url + attr_reader :params def initialize(*identifiers, **params) - @buffer_size = params[:buffer_size] - @chunk_start = params[:chunk_start] - @url = params[:url] + @params = params end def close @@ -43,7 +39,7 @@ def delete! end def filled? - size == buffer_size + size == params[:buffer_size] end end end diff --git a/lib/gitlab/ci/trace/chunked_file/chunk_store/database.rb b/lib/gitlab/ci/trace/chunked_file/chunk_store/database.rb index 3948c07c352d..45bf5053775c 100644 --- a/lib/gitlab/ci/trace/chunked_file/chunk_store/database.rb +++ b/lib/gitlab/ci/trace/chunked_file/chunk_store/database.rb @@ -52,7 +52,8 @@ def size end def write!(data) - raise NotImplementedError, 'Partial write is not supported' unless buffer_size == data&.length + puts "#{self.class.name} - #{__callee__}: data.length: #{data.length.inspect} params[:chunk_index]: #{params[:chunk_index]}" + raise NotImplementedError, 'Partial write is not supported' unless params[:buffer_size] == data&.length raise NotImplementedError, 'UPDATE is not supported' if job_trace_chunk.data job_trace_chunk.data = data @@ -66,10 +67,13 @@ def append!(data) end def truncate!(offset) - raise NotImplementedError + raise NotImplementedError, 'Partial truncate is not supported' unless offset == 0 + + delete! end def delete! + puts "#{self.class.name} - #{__callee__}: params[:chunk_index]: #{params[:chunk_index]}" job_trace_chunk.destroy! 
end end diff --git a/lib/gitlab/ci/trace/chunked_file/chunk_store/redis.rb b/lib/gitlab/ci/trace/chunked_file/chunk_store/redis.rb index 574657803ddb..d77a6847a718 100644 --- a/lib/gitlab/ci/trace/chunked_file/chunk_store/redis.rb +++ b/lib/gitlab/ci/trace/chunked_file/chunk_store/redis.rb @@ -68,6 +68,7 @@ def size end def write!(data) + puts "#{self.class.name} - #{__callee__}: data.length: #{data.length.inspect} params[:chunk_index]: #{params[:chunk_index]}" Gitlab::Redis::Cache.with do |redis| redis.set(buffer_key, data) redis.strlen(buffer_key) @@ -75,6 +76,7 @@ def write!(data) end def append!(data) + puts "#{self.class.name} - #{__callee__}: data.length: #{data.length.inspect} params[:chunk_index]: #{params[:chunk_index]}" Gitlab::Redis::Cache.with do |redis| redis.append(buffer_key, data) data.length @@ -82,8 +84,10 @@ def append!(data) end def truncate!(offset) + puts "#{self.class.name} - #{__callee__}: offset: #{offset.inspect} params[:chunk_index]: #{params[:chunk_index]}" Gitlab::Redis::Cache.with do |redis| - return unless redis.exists(buffer_key) + return 0 unless redis.exists(buffer_key) + return delete! if offset == 0 truncated_data = redis.getrange(buffer_key, 0, offset) redis.set(buffer_key, truncated_data) @@ -91,6 +95,7 @@ def truncate!(offset) end def delete! + puts "#{self.class.name} - #{__callee__}: params[:chunk_index]: #{params[:chunk_index]}" Gitlab::Redis::Cache.with do |redis| redis.del(buffer_key) end diff --git a/lib/gitlab/ci/trace/chunked_file/chunked_io.rb b/lib/gitlab/ci/trace/chunked_file/chunked_io.rb index e4add1a53e81..3adfc43769b9 100644 --- a/lib/gitlab/ci/trace/chunked_file/chunked_io.rb +++ b/lib/gitlab/ci/trace/chunked_file/chunked_io.rb @@ -1,21 +1,23 @@ ## -# This class is designed as it's compatible with IO class (https://ruby-doc.org/core-2.3.1/IO.html) +# ChunkedIO Engine +# +# Choose a chunk_store with your purpose +# This class is designed that it's compatible with IO class (https://ruby-doc.org/core-2.3.1/IO.html) module Gitlab module Ci class Trace module ChunkedFile class ChunkedIO class << self - def open(job_id, size, mode) - stream = self.new(job_id, size, mode) + def open(*args) + stream = self.new(*args) yield stream ensure - stream.close + stream&.close end end - BUFFER_SIZE = 128.kilobytes WriteError = Class.new(StandardError) FailedToGetChunkError = Class.new(StandardError) @@ -124,32 +126,16 @@ def write(data, &block) raise WriteError, 'Could not write without lock' unless write_lock_uuid raise WriteError, 'Could not write empty data' unless data.present? - data = data.dup - - chunk_index_start = chunk_index - chunk_index_end = (tell + data.length) / BUFFER_SIZE + _data = data.dup prev_tell = tell - (chunk_index_start..chunk_index_end).each do |c_index| - chunk_store.open(job_id, c_index, params_for_store) do |store| - writable_space = BUFFER_SIZE - chunk_offset - writing_size = [writable_space, data.length].min - - break unless writing_size > 0 - - if store.size > 0 - written_size = store.append!(data.slice!(0...writing_size)) - else - written_size = store.write!(data.slice!(0...writing_size)) - end + until _data.empty? + writable_space = buffer_size - chunk_offset + writing_size = [writable_space, _data.length].min + written_size = write_chunk!(_data.slice!(0...writing_size), &block) - raise WriteError, 'Written size mismatch' unless writing_size == written_size - - @tell += written_size - @size = [tell, size].max - - block.call(store, c_index) if block_given? 
- end + @tell += written_size + @size = [tell, size].max end tell - prev_tell @@ -159,24 +145,19 @@ def truncate(offset, &block) raise WriteError, 'Could not write without lock' unless write_lock_uuid raise WriteError, 'Offset is out of bound' if offset > size || offset < 0 - chunk_index_start = (offset / BUFFER_SIZE) - chunk_index_end = chunks_count - 1 + @tell = size - 1 - (chunk_index_start..chunk_index_end).reverse_each do |c_index| - chunk_store.open(job_id, c_index, params_for_store) do |store| - c_index_start = c_index * BUFFER_SIZE + until size == offset + truncatable_space = size - chunk_start + _chunk_offset = (offset <= chunk_start) ? 0 : offset % buffer_size + removed_size = truncate_chunk!(_chunk_offset, &block) - if offset <= c_index_start - store.delete! - else - store.truncate!(offset - c_index_start) if store.size > 0 - end - - block.call(store, c_index) if block_given? - end + @tell -= removed_size + @size -= removed_size end - @tell = @size = offset + @tell = [tell, 0].max + @size = [size, 0].max end def flush @@ -198,48 +179,76 @@ def get_chunk chunk_store.open(job_id, chunk_index, params_for_store) do |store| @chunk = store.get - raise FailedToGetChunkError unless chunk + raise FailedToGetChunkError unless chunk && chunk.length > 0 @chunk_range = (chunk_start...(chunk_start + chunk.length)) end end - @chunk[chunk_offset..BUFFER_SIZE] + @chunk[chunk_offset..buffer_size] + end + + def write_chunk!(data, &block) + chunk_store.open(job_id, chunk_index, params_for_store) do |store| + written_size = if buffer_size == data.length + store.write!(data) + else + store.append!(data) + end + + raise WriteError, 'Written size mismatch' unless data.length == written_size + + block.call(store) if block_given? + + written_size + end + end + + def truncate_chunk!(offset, &block) + chunk_store.open(job_id, chunk_index, params_for_store) do |store| + removed_size = store.size - offset + store.truncate!(offset) + + block.call(store) if block_given? + + removed_size + end end - def params_for_store + def params_for_store(c_index = chunk_index) { - buffer_size: BUFFER_SIZE, - chunk_start: chunk_start + buffer_size: buffer_size, + chunk_start: c_index * buffer_size, + chunk_index: c_index } end def chunk_offset - tell % BUFFER_SIZE + tell % buffer_size end def chunk_start - (tell / BUFFER_SIZE) * BUFFER_SIZE + chunk_index * buffer_size end def chunk_end - [chunk_start + BUFFER_SIZE, size].min + [chunk_start + buffer_size, size].min end def chunk_index - (tell / BUFFER_SIZE) + (tell / buffer_size) end def chunks_count - (size / BUFFER_SIZE) + (has_extra? ? 1 : 0) + (size / buffer_size) + (has_extra? ? 1 : 0) end def has_extra? - (size % BUFFER_SIZE) > 0 + (size % buffer_size) > 0 end def last_chunk? 
- chunk_index == (chunks_count - 1) + chunks_count == 0 || chunk_index == (chunks_count - 1) || chunk_index == chunks_count end def write_lock_key @@ -249,6 +258,10 @@ def write_lock_key def chunk_store raise NotImplementedError end + + def buffer_size + raise NotImplementedError + end end end end diff --git a/lib/gitlab/ci/trace/chunked_file/remote.rb b/lib/gitlab/ci/trace/chunked_file/http_io.rb similarity index 77% rename from lib/gitlab/ci/trace/chunked_file/remote.rb rename to lib/gitlab/ci/trace/chunked_file/http_io.rb index caa2235bdbda..32b2224a18b3 100644 --- a/lib/gitlab/ci/trace/chunked_file/remote.rb +++ b/lib/gitlab/ci/trace/chunked_file/http_io.rb @@ -2,19 +2,9 @@ module Gitlab module Ci class Trace module ChunkedFile - class Remote < ChunkedIO + class HttpIO < ChunkedIO BUFFER_SIZE = 128.kilobytes - class << self - def open(job_id, mode) - stream = self.new(job_id, mode) - - yield stream - ensure - stream.close - end - end - InvalidURLError = Class.new(StandardError) attr_reader :uri diff --git a/lib/gitlab/ci/trace/chunked_file/live_trace.rb b/lib/gitlab/ci/trace/chunked_file/live_trace.rb index 081ae6e8d518..264bb98ef6c2 100644 --- a/lib/gitlab/ci/trace/chunked_file/live_trace.rb +++ b/lib/gitlab/ci/trace/chunked_file/live_trace.rb @@ -3,17 +3,7 @@ module Ci class Trace module ChunkedFile class LiveTrace < ChunkedIO - BUFFER_SIZE = 128.kilobytes - class << self - def open(job_id, mode) - stream = self.new(job_id, mode) - - yield stream - ensure - stream.close - end - def exist?(job_id) ChunkStores::Redis.chunks_count(job_id) > 0 || ChunkStores::Database.chunks_count(job_id) > 0 @@ -21,7 +11,7 @@ def exist?(job_id) end def initialize(job_id, mode) - super(job_id, calculate_size, mode) + super(job_id, calculate_size(job_id), mode) end def write(data) @@ -29,30 +19,51 @@ def write(data) super(data) do |store| if store.filled? - # Rotate data from redis to database - ChunkStores::Database.open(job_id, chunk_index, params_for_store) do |to_store| + # Once data is filled into redis, move the data to database + ChunkStore::Database.open(job_id, chunk_index, params_for_store) do |to_store| to_store.write!(store.get) + store.delete! end + end + end + end + + def truncate(offset) + super(offset) do |store| + next if chunk_index == 0 + + prev_chunk_index = chunk_index - 1 - store.delete! + if ChunkStore::Database.exist?(job_id, prev_chunk_index) + # Swap data from Database to Redis to truncate any size than buffer_size + ChunkStore::Database.open(job_id, prev_chunk_index, params_for_store(prev_chunk_index)) do |from_store| + ChunkStore::Redis.open(job_id, prev_chunk_index, params_for_store(prev_chunk_index)) do |to_store| + to_store.write!(from_store.get) + from_store.delete! + end + end end end end private - def calculate_size - ChunkStores::Redis.chunks_size(job_id) + - ChunkStores::Database.chunks_size(job_id) + def calculate_size(job_id) + ChunkStore::Redis.chunks_size(job_id) + + ChunkStore::Database.chunks_size(job_id) end def chunk_store if last_chunk? 
- ChunkStores::Redis + ChunkStore::Redis else - ChunkStores::Database + ChunkStore::Database end end + + def buffer_size + 128.kilobytes + end end end end diff --git a/spec/lib/gitlab/ci/trace/chunked_file/chunked_io_spec.rb b/spec/lib/gitlab/ci/trace/chunked_file/chunked_io_spec.rb index 6dec1b319bb6..048cad9f2e05 100644 --- a/spec/lib/gitlab/ci/trace/chunked_file/chunked_io_spec.rb +++ b/spec/lib/gitlab/ci/trace/chunked_file/chunked_io_spec.rb @@ -14,6 +14,7 @@ before do allow_any_instance_of(described_class).to receive(:chunk_store).and_return(chunk_store) + allow_any_instance_of(described_class).to receive(:buffer_size).and_return(128.kilobytes) end it_behaves_like 'ChunkedIO shared tests' @@ -24,6 +25,7 @@ before do allow_any_instance_of(described_class).to receive(:chunk_store).and_return(chunk_store) + allow_any_instance_of(described_class).to receive(:buffer_size).and_return(128.kilobytes) end it_behaves_like 'ChunkedIO shared tests' diff --git a/spec/lib/gitlab/ci/trace/chunked_file/live_trace_spec.rb b/spec/lib/gitlab/ci/trace/chunked_file/live_trace_spec.rb new file mode 100644 index 000000000000..14f6c07a0396 --- /dev/null +++ b/spec/lib/gitlab/ci/trace/chunked_file/live_trace_spec.rb @@ -0,0 +1,201 @@ +require 'spec_helper' + +describe Gitlab::Ci::Trace::ChunkedFile::LiveTrace, :clean_gitlab_redis_cache do + include LiveTraceHelpers + + let(:chunked_io) { described_class.new(job_id, mode) } + let(:job) { create(:ci_build) } + let(:job_id) { job.id } + let(:size) { sample_trace_size } + let(:mode) { 'rb' } + + describe '#write' do + subject { chunked_io.write(data) } + + let(:data) { sample_trace_raw } + + context 'when write mode' do + let(:mode) { 'wb' } + + context 'when buffer size is smaller than file size' do + before do + set_smaller_buffer_size_than(size) + end + + it 'writes a trace' do + is_expected.to eq(data.length) + + described_class.open(job_id, 'rb') do |stream| + expect(stream.read).to eq(data) + expect(total_chunks_count).to eq(stream.send(:chunks_count)) + expect(total_chunks_size).to eq(data.length) + end + end + end + + context 'when buffer size is larger than file size' do + before do + set_larger_buffer_size_than(size) + end + + it 'writes a trace' do + is_expected.to eq(data.length) + + described_class.open(job_id, 'rb') do |stream| + expect(stream.read).to eq(data) + expect(total_chunks_count).to eq(stream.send(:chunks_count)) + expect(total_chunks_size).to eq(data.length) + end + end + end + + context 'when data is nil' do + let(:data) { nil } + + it 'writes a trace' do + expect { subject } .to raise_error('Could not write empty data') + end + end + end + + context 'when append mode' do + let(:original_data) { 'original data' } + let(:total_size) { original_data.length + data.length } + + context 'when buffer size is smaller than file size' do + before do + set_smaller_buffer_size_than(size) + fill_trace_to_chunks(original_data) + end + + it 'appends a trace' do + described_class.open(job_id, 'a+b') do |stream| + expect(stream.write(data)).to eq(data.length) + end + + described_class.open(job_id, 'rb') do |stream| + expect(stream.read).to eq(original_data + data) + expect(total_chunks_count).to eq(stream.send(:chunks_count)) + expect(total_chunks_size).to eq(total_size) + end + end + end + + context 'when buffer size is larger than file size' do + before do + set_larger_buffer_size_than(size) + fill_trace_to_chunks(original_data) + end + + it 'appends a trace' do + described_class.open(job_id, 'a+b') do |stream| + expect(stream.write(data)).to 
eq(data.length) + end + + described_class.open(job_id, 'rb') do |stream| + expect(stream.read).to eq(original_data + data) + expect(total_chunks_count).to eq(stream.send(:chunks_count)) + expect(total_chunks_size).to eq(total_size) + end + end + end + end + end + + describe '#truncate' do + context 'when data exists' do + context 'when buffer size is smaller than file size' do + before do + puts "#{self.class.name} - #{__callee__}: ===== 1" + set_smaller_buffer_size_than(size) + fill_trace_to_chunks(sample_trace_raw) + end + + it 'truncates a trace' do + puts "#{self.class.name} - #{__callee__}: ===== 2" + described_class.open(job_id, 'rb') do |stream| + expect(stream.read).to eq(sample_trace_raw) + end + + puts "#{self.class.name} - #{__callee__}: ===== 3" + described_class.open(job_id, 'wb') do |stream| + stream.truncate(0) + end + + puts "#{self.class.name} - #{__callee__}: ===== 4" + expect(total_chunks_count).to eq(0) + expect(total_chunks_size).to eq(0) + + puts "#{self.class.name} - #{__callee__}: ===== 5" + described_class.open(job_id, 'rb') do |stream| + expect(stream.read).to be_empty + end + end + + context 'when offset is negative' do + it 'raises an error' do + described_class.open(job_id, 'wb') do |stream| + expect { stream.truncate(-1) }.to raise_error('Offset is out of bound') + end + end + end + + context 'when offset is larger than file size' do + it 'raises an error' do + described_class.open(job_id, 'wb') do |stream| + expect { stream.truncate(size + 1) }.to raise_error('Offset is out of bound') + end + end + end + end + + context 'when buffer size is larger than file size' do + before do + set_larger_buffer_size_than(size) + fill_trace_to_chunks(sample_trace_raw) + end + + it 'truncates a trace' do + described_class.open(job_id, 'rb') do |stream| + expect(stream.read).to eq(sample_trace_raw) + end + + described_class.open(job_id, 'wb') do |stream| + stream.truncate(0) + end + + described_class.open(job_id, 'rb') do |stream| + expect(stream.read).to be_empty + end + + expect(total_chunks_count).to eq(0) + expect(total_chunks_size).to eq(0) + end + end + end + + context 'when data does not exist' do + before do + set_smaller_buffer_size_than(size) + end + + it 'truncates a trace' do + described_class.open(job_id, 'wb') do |stream| + stream.truncate(0) + expect(stream.send(:tell)).to eq(0) + expect(stream.send(:size)).to eq(0) + end + end + end + end + + def total_chunks_count + Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis.chunks_count(job_id) + + Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Database.chunks_count(job_id) + end + + def total_chunks_size + Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis.chunks_size(job_id) + + Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Database.chunks_size(job_id) + end +end diff --git a/spec/support/chunked_io/chunked_io_helpers.rb b/spec/support/chunked_io/chunked_io_helpers.rb index d35968e460c9..c12ad743c001 100644 --- a/spec/support/chunked_io/chunked_io_helpers.rb +++ b/spec/support/chunked_io/chunked_io_helpers.rb @@ -1,6 +1,6 @@ module ChunkedIOHelpers def fill_trace_to_chunks(data) - stream = Gitlab::Ci::Trace::ChunkedFile::ChunkedIO.new(job_id, data.length, 'wb') + stream = described_class.new(job_id, data.length, 'wb') stream.write(data) stream.close end @@ -17,6 +17,20 @@ def sample_trace_size sample_trace_raw.length end + def sample_trace_raw_for_live_trace + File.read(expand_fixture_path('trace/sample_trace')) + end + + def sample_trace_size_for_live_trace + sample_trace_raw_for_live_trace.length + end + + def 
fill_trace_to_chunks_for_live_trace(data) + stream = described_class.new(job_id, 'wb') + stream.write(data) + stream.close + end + def stub_chunk_store_get_failed allow_any_instance_of(chunk_store).to receive(:get).and_return(nil) end @@ -24,12 +38,12 @@ def stub_chunk_store_get_failed def set_smaller_buffer_size_than(file_size) blocks = (file_size / 128) new_size = (blocks / 2) * 128 - stub_const("Gitlab::Ci::Trace::ChunkedFile::ChunkedIO::BUFFER_SIZE", new_size) + allow_any_instance_of(described_class).to receive(:buffer_size).and_return(new_size) end def set_larger_buffer_size_than(file_size) blocks = (file_size / 128) new_size = (blocks * 2) * 128 - stub_const("Gitlab::Ci::Trace::ChunkedFile::ChunkedIO::BUFFER_SIZE", new_size) + allow_any_instance_of(described_class).to receive(:buffer_size).and_return(new_size) end end diff --git a/spec/support/chunked_io/live_trace_helpers.rb b/spec/support/chunked_io/live_trace_helpers.rb new file mode 100644 index 000000000000..8ff85daff28d --- /dev/null +++ b/spec/support/chunked_io/live_trace_helpers.rb @@ -0,0 +1,32 @@ +module LiveTraceHelpers + def fill_trace_to_chunks(data) + stream = described_class.new(job_id, 'wb') + stream.write(data) + stream.close + end + + def sample_trace_raw + File.read(expand_fixture_path('trace/sample_trace')) + end + + def sample_trace_size + sample_trace_raw.length + end + + def stub_chunk_store_get_failed + allow_any_instance_of(Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis).to receive(:get).and_return(nil) + allow_any_instance_of(Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Database).to receive(:get).and_return(nil) + end + + def set_smaller_buffer_size_than(file_size) + blocks = (file_size / 128) + new_size = (blocks / 2) * 128 + allow_any_instance_of(described_class).to receive(:buffer_size).and_return(new_size) + end + + def set_larger_buffer_size_than(file_size) + blocks = (file_size / 128) + new_size = (blocks * 2) * 128 + allow_any_instance_of(described_class).to receive(:buffer_size).and_return(new_size) + end +end diff --git a/spec/support/shared_examples/lib/gitlab/ci/trace/chunked_file/chunked_io_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/ci/trace/chunked_file/chunked_io_shared_examples.rb index b1c9eabf75f4..8bb3a6d8ff9a 100644 --- a/spec/support/shared_examples/lib/gitlab/ci/trace/chunked_file/chunked_io_shared_examples.rb +++ b/spec/support/shared_examples/lib/gitlab/ci/trace/chunked_file/chunked_io_shared_examples.rb @@ -82,7 +82,7 @@ describe '#each_line' do let(:string_io) { StringIO.new(sample_trace_raw) } - context 'when BUFFER_SIZE is smaller than file size' do + context 'when buffer size is smaller than file size' do before do set_smaller_buffer_size_than(size) fill_trace_to_chunks(sample_trace_raw) @@ -94,7 +94,7 @@ end end - context 'when BUFFER_SIZE is larger than file size', :partial_support do + context 'when buffer size is larger than file size', :partial_support do before do set_larger_buffer_size_than(size) fill_trace_to_chunks(sample_trace_raw) @@ -114,7 +114,7 @@ context 'when read whole size' do let(:length) { nil } - context 'when BUFFER_SIZE is smaller than file size' do + context 'when buffer size is smaller than file size' do before do set_smaller_buffer_size_than(size) fill_trace_to_chunks(sample_trace_raw) @@ -125,7 +125,7 @@ end end - context 'when BUFFER_SIZE is larger than file size', :partial_support do + context 'when buffer size is larger than file size', :partial_support do before do set_larger_buffer_size_than(size) 
fill_trace_to_chunks(sample_trace_raw) @@ -140,7 +140,7 @@ context 'when read only first 100 bytes' do let(:length) { 100 } - context 'when BUFFER_SIZE is smaller than file size' do + context 'when buffer size is smaller than file size' do before do set_smaller_buffer_size_than(size) fill_trace_to_chunks(sample_trace_raw) @@ -151,7 +151,7 @@ end end - context 'when BUFFER_SIZE is larger than file size', :partial_support do + context 'when buffer size is larger than file size', :partial_support do before do set_larger_buffer_size_than(size) fill_trace_to_chunks(sample_trace_raw) @@ -166,7 +166,7 @@ context 'when tries to read oversize' do let(:length) { size + 1000 } - context 'when BUFFER_SIZE is smaller than file size' do + context 'when buffer size is smaller than file size' do before do set_smaller_buffer_size_than(size) fill_trace_to_chunks(sample_trace_raw) @@ -177,7 +177,7 @@ end end - context 'when BUFFER_SIZE is larger than file size', :partial_support do + context 'when buffer size is larger than file size', :partial_support do before do set_larger_buffer_size_than(size) fill_trace_to_chunks(sample_trace_raw) @@ -192,7 +192,7 @@ context 'when tries to read 0 bytes' do let(:length) { 0 } - context 'when BUFFER_SIZE is smaller than file size' do + context 'when buffer size is smaller than file size' do before do set_smaller_buffer_size_than(size) fill_trace_to_chunks(sample_trace_raw) @@ -203,7 +203,7 @@ end end - context 'when BUFFER_SIZE is larger than file size', :partial_support do + context 'when buffer size is larger than file size', :partial_support do before do set_larger_buffer_size_than(size) fill_trace_to_chunks(sample_trace_raw) @@ -226,7 +226,7 @@ end it 'reads a trace' do - expect { subject }.to raise_error(Gitlab::Ci::Trace::ChunkedFile::ChunkedIO::FailedToGetChunkError) + expect { subject }.to raise_error(described_class::FailedToGetChunkError) end end end @@ -254,11 +254,11 @@ end it 'reads a trace' do - expect { subject }.to raise_error(Gitlab::Ci::Trace::ChunkedFile::ChunkedIO::FailedToGetChunkError) + expect { subject }.to raise_error(described_class::FailedToGetChunkError) end end - context 'when BUFFER_SIZE is smaller than file size' do + context 'when buffer size is smaller than file size' do before do set_smaller_buffer_size_than(size) fill_trace_to_chunks(sample_trace_raw) @@ -267,7 +267,7 @@ it_behaves_like 'all line matching' end - context 'when BUFFER_SIZE is larger than file size', :partial_support do + context 'when buffer size is larger than file size', :partial_support do before do set_larger_buffer_size_than(size) fill_trace_to_chunks(sample_trace_raw) @@ -296,10 +296,10 @@ let(:data) { sample_trace_raw } - context 'when write mdoe' do + context 'when write mode' do let(:mode) { 'wb' } - context 'when BUFFER_SIZE is smaller than file size' do + context 'when buffer size is smaller than file size' do before do set_smaller_buffer_size_than(size) end @@ -307,7 +307,7 @@ it 'writes a trace' do is_expected.to eq(data.length) - Gitlab::Ci::Trace::ChunkedFile::ChunkedIO.open(job_id, size, 'rb') do |stream| + described_class.open(job_id, size, 'rb') do |stream| expect(stream.read).to eq(data) expect(chunk_store.chunks_count(job_id)).to eq(stream.send(:chunks_count)) expect(chunk_store.chunks_size(job_id)).to eq(data.length) @@ -315,7 +315,7 @@ end end - context 'when BUFFER_SIZE is larger than file size', :partial_support do + context 'when buffer size is larger than file size', :partial_support do before do set_larger_buffer_size_than(size) end @@ -323,7 
+323,7 @@ it 'writes a trace' do is_expected.to eq(data.length) - Gitlab::Ci::Trace::ChunkedFile::ChunkedIO.open(job_id, size, 'rb') do |stream| + described_class.open(job_id, size, 'rb') do |stream| expect(stream.read).to eq(data) expect(chunk_store.chunks_count(job_id)).to eq(stream.send(:chunks_count)) expect(chunk_store.chunks_size(job_id)).to eq(data.length) @@ -340,11 +340,11 @@ end end - context 'when append mdoe', :partial_support do + context 'when append mode', :partial_support do let(:original_data) { 'original data' } let(:total_size) { original_data.length + data.length } - context 'when BUFFER_SIZE is smaller than file size' do + context 'when buffer size is smaller than file size' do before do set_smaller_buffer_size_than(size) fill_trace_to_chunks(original_data) @@ -363,7 +363,7 @@ end end - context 'when BUFFER_SIZE is larger than file size' do + context 'when buffer size is larger than file size' do before do set_larger_buffer_size_than(size) fill_trace_to_chunks(original_data) @@ -386,7 +386,7 @@ describe '#truncate' do context 'when data exists' do - context 'when BUFFER_SIZE is smaller than file size' do + context 'when buffer size is smaller than file size' do before do set_smaller_buffer_size_than(size) fill_trace_to_chunks(sample_trace_raw) @@ -426,7 +426,7 @@ end end - context 'when BUFFER_SIZE is larger than file size', :partial_support do + context 'when buffer size is larger than file size', :partial_support do before do set_larger_buffer_size_than(size) fill_trace_to_chunks(sample_trace_raw) -- GitLab From 0971f301fb6e8bfb17e15e6853420ffc008af302 Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Mon, 2 Apr 2018 04:20:44 +0900 Subject: [PATCH 12/86] Add new concerns --- lib/gitlab/ci/trace.rb | 6 +- .../chunked_file/chunk_store/database.rb | 8 +- .../trace/chunked_file/chunk_store/redis.rb | 9 +- .../ci/trace/chunked_file/chunked_io.rb | 150 ++++++++---------- .../trace/chunked_file/concerns/callbacks.rb | 38 +++++ .../ci/trace/chunked_file/concerns/errors.rb | 18 +++ .../ci/trace/chunked_file/concerns/hooks.rb | 63 ++++++++ .../ci/trace/chunked_file/concerns/opener.rb | 23 +++ .../chunked_file/concerns/permissions.rb | 65 ++++++++ .../ci/trace/chunked_file/live_trace.rb | 44 +++-- 10 files changed, 308 insertions(+), 116 deletions(-) create mode 100644 lib/gitlab/ci/trace/chunked_file/concerns/callbacks.rb create mode 100644 lib/gitlab/ci/trace/chunked_file/concerns/errors.rb create mode 100644 lib/gitlab/ci/trace/chunked_file/concerns/hooks.rb create mode 100644 lib/gitlab/ci/trace/chunked_file/concerns/opener.rb create mode 100644 lib/gitlab/ci/trace/chunked_file/concerns/permissions.rb diff --git a/lib/gitlab/ci/trace.rb b/lib/gitlab/ci/trace.rb index 9db65961a520..83ed58f48454 100644 --- a/lib/gitlab/ci/trace.rb +++ b/lib/gitlab/ci/trace.rb @@ -61,7 +61,7 @@ def read stream = Gitlab::Ci::Trace::Stream.new do if trace_artifact trace_artifact.open - elsif Feature.enabled?('ci_enable_live_trace') && ChunkedFile::LiveTrace.exists?(job.id) + elsif ChunkedFile::LiveTrace.exist?(job.id) ChunkedFile::LiveTrace.new(job.id, "rb") elsif current_path File.open(current_path, "rb") @@ -109,10 +109,10 @@ def archive! raise ArchiveError, 'Already archived' if trace_artifact raise ArchiveError, 'Job is not finished yet' unless job.complete? 
- if Feature.enabled?('ci_enable_live_trace') && ChunkedFile::LiveTrace.exists?(job.id) + if ChunkedFile::LiveTrace.exist?(job.id) ChunkedFile::LiveTrace.open(job.id, "wb") do |stream| archive_stream!(stream) - stream.truncate(0) + stream.delete end elsif current_path File.open(current_path) do |stream| diff --git a/lib/gitlab/ci/trace/chunked_file/chunk_store/database.rb b/lib/gitlab/ci/trace/chunked_file/chunk_store/database.rb index 45bf5053775c..a7db214f4285 100644 --- a/lib/gitlab/ci/trace/chunked_file/chunk_store/database.rb +++ b/lib/gitlab/ci/trace/chunked_file/chunk_store/database.rb @@ -29,6 +29,10 @@ def chunks_size(job_id) ::Ci::JobTraceChunk.where(job_id: job_id).pluck('data') .inject(0) { |sum, data| sum + data.length } end + + def delete_all(job_id) + ::Ci::JobTraceChunk.destroy_all(job_id: job_id) + end end attr_reader :job_trace_chunk @@ -67,9 +71,7 @@ def append!(data) end def truncate!(offset) - raise NotImplementedError, 'Partial truncate is not supported' unless offset == 0 - - delete! + raise NotImplementedError end def delete! diff --git a/lib/gitlab/ci/trace/chunked_file/chunk_store/redis.rb b/lib/gitlab/ci/trace/chunked_file/chunk_store/redis.rb index d77a6847a718..cb45cd5fba54 100644 --- a/lib/gitlab/ci/trace/chunked_file/chunk_store/redis.rb +++ b/lib/gitlab/ci/trace/chunked_file/chunk_store/redis.rb @@ -38,6 +38,14 @@ def chunks_size(job_id) end end + def delete_all(job_id) + Gitlab::Redis::Cache.with do |redis| + redis.scan_each(:match => buffer_key(job_id, '?')) do |key| + redis.del(key) + end + end + end + def buffer_key(job_id, chunk_index) "live_trace_buffer:#{job_id}:#{chunk_index}" end @@ -87,7 +95,6 @@ def truncate!(offset) puts "#{self.class.name} - #{__callee__}: offset: #{offset.inspect} params[:chunk_index]: #{params[:chunk_index]}" Gitlab::Redis::Cache.with do |redis| return 0 unless redis.exists(buffer_key) - return delete! if offset == 0 truncated_data = redis.getrange(buffer_key, 0, offset) redis.set(buffer_key, truncated_data) diff --git a/lib/gitlab/ci/trace/chunked_file/chunked_io.rb b/lib/gitlab/ci/trace/chunked_file/chunked_io.rb index 3adfc43769b9..d23fe2a47d51 100644 --- a/lib/gitlab/ci/trace/chunked_file/chunked_io.rb +++ b/lib/gitlab/ci/trace/chunked_file/chunked_io.rb @@ -8,43 +8,30 @@ module Ci class Trace module ChunkedFile class ChunkedIO - class << self - def open(*args) - stream = self.new(*args) - - yield stream - ensure - stream&.close - end - end - - WriteError = Class.new(StandardError) - FailedToGetChunkError = Class.new(StandardError) + extend ChunkedFile::Concerns::Opener + include ChunkedFile::Concerns::Errors + include ChunkedFile::Concerns::Hooks + include ChunkedFile::Concerns::Callbacks + prepend ChunkedFile::Concerns::Permissions attr_reader :size attr_reader :tell attr_reader :chunk, :chunk_range - attr_reader :write_lock_uuid attr_reader :job_id + attr_reader :mode alias_method :pos, :tell - def initialize(job_id, size, mode) + def initialize(job_id, size, mode = 'rb') @size = size @tell = 0 @job_id = job_id + @mode = mode - if /(w|a)/ =~ mode - @write_lock_uuid = Gitlab::ExclusiveLease.new(write_lock_key, timeout: 1.hour.to_i).try_obtain - - raise WriteError, 'Already opened by another process' unless write_lock_uuid - - seek(0, IO::SEEK_END) if /a/ =~ mode - end + raise NotImplementedError, "Mode 'w' is not supported" if mode.include?('w') end def close - Gitlab::ExclusiveLease.cancel(write_lock_key, write_lock_uuid) if write_lock_uuid end def binmode @@ -55,20 +42,20 @@ def binmode? 
true end - def seek(pos, where = IO::SEEK_SET) + def seek(amount, where = IO::SEEK_SET) new_pos = case where when IO::SEEK_END - size + pos + size + amount when IO::SEEK_SET - pos + amount when IO::SEEK_CUR - tell + pos + tell + amount else -1 end - raise 'new position is outside of file' if new_pos < 0 || new_pos > size + raise ArgumentError, 'new position is outside of file' if new_pos < 0 || new_pos > size @tell = new_pos end @@ -122,42 +109,18 @@ def readline out end - def write(data, &block) - raise WriteError, 'Could not write without lock' unless write_lock_uuid - raise WriteError, 'Could not write empty data' unless data.present? - - _data = data.dup - prev_tell = tell - - until _data.empty? - writable_space = buffer_size - chunk_offset - writing_size = [writable_space, _data.length].min - written_size = write_chunk!(_data.slice!(0...writing_size), &block) + def write(data) + raise ArgumentError, 'Could not write empty data' unless data.present? - @tell += written_size - @size = [tell, size].max + if mode.include?('w') + write_as_overwrite(data) + elsif mode.include?('a') + write_as_append(data) end - - tell - prev_tell end - def truncate(offset, &block) - raise WriteError, 'Could not write without lock' unless write_lock_uuid - raise WriteError, 'Offset is out of bound' if offset > size || offset < 0 - - @tell = size - 1 - - until size == offset - truncatable_space = size - chunk_start - _chunk_offset = (offset <= chunk_start) ? 0 : offset % buffer_size - removed_size = truncate_chunk!(_chunk_offset, &block) - - @tell -= removed_size - @size -= removed_size - end - - @tell = [tell, 0].max - @size = [size, 0].max + def truncate(offset) + raise NotImplementedError end def flush @@ -178,9 +141,6 @@ def get_chunk unless in_range? chunk_store.open(job_id, chunk_index, params_for_store) do |store| @chunk = store.get - - raise FailedToGetChunkError unless chunk && chunk.length > 0 - @chunk_range = (chunk_start...(chunk_start + chunk.length)) end end @@ -188,30 +148,54 @@ def get_chunk @chunk[chunk_offset..buffer_size] end - def write_chunk!(data, &block) - chunk_store.open(job_id, chunk_index, params_for_store) do |store| - written_size = if buffer_size == data.length - store.write!(data) - else - store.append!(data) - end + def write_as_overwrite(data) + raise NotImplementedError, "Overwrite is not supported" + end + + def write_as_append(data) + @tell = size - raise WriteError, 'Written size mismatch' unless data.length == written_size + data_size = data.size + new_tell = tell + data_size + data_offset = 0 - block.call(store) if block_given? + until tell == new_tell + writable_size = buffer_size - chunk_offset + writable_data = data[data_offset...(data_offset + writable_size)] + written_size = write_chunk(writable_data) - written_size + data_offset += written_size + @tell += written_size + @size = [tell, size].max end + + data_size end - def truncate_chunk!(offset, &block) + def write_chunk(data) chunk_store.open(job_id, chunk_index, params_for_store) do |store| - removed_size = store.size - offset - store.truncate!(offset) + with_callbacks(:write_chunk, store) do + written_size = if buffer_size == data.length + store.write!(data) + else + store.append!(data) + end - block.call(store) if block_given? 
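# Worked example of the append arithmetic in write_as_append above
# (illustrative numbers only):
#
#   buffer_size   = 128.kilobytes               # => 131_072 bytes
#   tell          = 130_000                     # current append position
#   chunk_offset  = tell % buffer_size          # => 130_000, inside chunk 0
#   writable_size = buffer_size - chunk_offset  # => 1_072 bytes left in chunk 0
#
# A 200_000-byte write is therefore sliced into 1_072 bytes appended to
# chunk 0, a full 131_072-byte write! to chunk 1, and 67_856 bytes appended
# to chunk 2.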
+ raise WriteError, 'Written size mismatch' unless data.length == written_size - removed_size + written_size + end + end + end + + def truncate_chunk(offset) + chunk_store.open(job_id, chunk_index, params_for_store) do |store| + with_callbacks(:truncate_chunk, store) do + removed_size = store.size - offset + store.truncate!(offset) + + removed_size + end end end @@ -240,19 +224,15 @@ def chunk_index end def chunks_count - (size / buffer_size) + (has_extra? ? 1 : 0) + (size / buffer_size) end - def has_extra? - (size % buffer_size) > 0 + def first_chunk? + chunk_index == 0 end def last_chunk? - chunks_count == 0 || chunk_index == (chunks_count - 1) || chunk_index == chunks_count - end - - def write_lock_key - "live_trace:operation:write:#{job_id}" + chunks_count == 0 || chunk_index == (chunks_count - 1) end def chunk_store diff --git a/lib/gitlab/ci/trace/chunked_file/concerns/callbacks.rb b/lib/gitlab/ci/trace/chunked_file/concerns/callbacks.rb new file mode 100644 index 000000000000..0a49ac4dbbf0 --- /dev/null +++ b/lib/gitlab/ci/trace/chunked_file/concerns/callbacks.rb @@ -0,0 +1,38 @@ +module Gitlab + module Ci + class Trace + module ChunkedFile + module Concerns + module Callbacks + extend ActiveSupport::Concern + + included do + class_attribute :_before_callbacks, :_after_callbacks, + :instance_writer => false + self._before_callbacks = Hash.new [] + self._after_callbacks = Hash.new [] + end + + def with_callbacks(kind, *args) + self.class._before_callbacks[kind].each { |c| send c, *args } + yield + self.class._after_callbacks[kind].each { |c| send c, *args } + end + + module ClassMethods + def before_callback(kind, callback) + self._before_callbacks = self._before_callbacks. + merge kind => _before_callbacks[kind] + [callback] + end + + def after_callback(kind, callback) + self._after_callbacks = self._after_callbacks. + merge kind => _after_callbacks[kind] + [callback] + end + end + end + end + end + end + end +end diff --git a/lib/gitlab/ci/trace/chunked_file/concerns/errors.rb b/lib/gitlab/ci/trace/chunked_file/concerns/errors.rb new file mode 100644 index 000000000000..ccdb17005e29 --- /dev/null +++ b/lib/gitlab/ci/trace/chunked_file/concerns/errors.rb @@ -0,0 +1,18 @@ +module Gitlab + module Ci + class Trace + module ChunkedFile + module Concerns + module Errors + extend ActiveSupport::Concern + + included do + WriteError = Class.new(StandardError) + FailedToGetChunkError = Class.new(StandardError) + end + end + end + end + end + end +end diff --git a/lib/gitlab/ci/trace/chunked_file/concerns/hooks.rb b/lib/gitlab/ci/trace/chunked_file/concerns/hooks.rb new file mode 100644 index 000000000000..290a3a15805e --- /dev/null +++ b/lib/gitlab/ci/trace/chunked_file/concerns/hooks.rb @@ -0,0 +1,63 @@ +module Gitlab + module Ci + class Trace + module ChunkedFile + module Concerns + module Hooks + extend ActiveSupport::Concern + + included do + class_attribute :_before_methods, :_after_methods, + :instance_writer => false + self._before_methods = Hash.new [] + self._after_methods = Hash.new [] + end + + class_methods do + def before_method(kind, callback) + self._before_methods = self._before_methods. + merge kind => _before_methods[kind] + [callback] + end + + def after_method(kind, callback) + self._after_methods = self._after_methods. 
+ merge kind => _after_methods[kind] + [callback] + end + end + + def method_added(method_name) + return if self.class._before_methods.values.include?(method_name) + return if self.class._after_methods.values.include?(method_name) + return if hooked_methods.include?(method_name) + + add_hooks_to(method_name) + end + + private + + def hooked_methods + @hooked_methods ||= [] + end + + def add_hooks_to(method_name) + hooked_methods << method_name + + original_method = instance_method(method_name) + + # re-define the method, but notice how we reference the original + # method definition + define_method(method_name) do |*args, &block| + self.class._before_methods[method_name].each { |hook| method(hook).call } + + # now invoke the original method + original_method.bind(self).call(*args, &block).tap do + self.class._after_methods[method_name].each { |hook| method(hook).call } + end + end + end + end + end + end + end + end +end diff --git a/lib/gitlab/ci/trace/chunked_file/concerns/opener.rb b/lib/gitlab/ci/trace/chunked_file/concerns/opener.rb new file mode 100644 index 000000000000..9f1f6eefcbc2 --- /dev/null +++ b/lib/gitlab/ci/trace/chunked_file/concerns/opener.rb @@ -0,0 +1,23 @@ +module Gitlab + module Ci + class Trace + module ChunkedFile + module Concerns + module Opener + extend ActiveSupport::Concern + + class_methods do + def open(*args) + stream = self.new(*args) + + yield stream + ensure + stream&.close + end + end + end + end + end + end + end +end diff --git a/lib/gitlab/ci/trace/chunked_file/concerns/permissions.rb b/lib/gitlab/ci/trace/chunked_file/concerns/permissions.rb new file mode 100644 index 000000000000..f87039704666 --- /dev/null +++ b/lib/gitlab/ci/trace/chunked_file/concerns/permissions.rb @@ -0,0 +1,65 @@ +module Gitlab + module Ci + class Trace + module ChunkedFile + module Concerns + module Permissions + extend ActiveSupport::Concern + + included do + PermissionError = Class.new(StandardError) + + attr_reader :write_lock_uuid + + # mode checks + before_method :read, :can_read! + before_method :readline, :can_read! + before_method :each_line, :can_read! + before_method :write, :can_write! + before_method :truncate, :can_write! + + # write_lock + before_method :write, :check_lock! + before_method :truncate, :check_lock! + end + + def initialize(job_id, size, mode = 'rb') + if /(w|a)/ =~ mode + @write_lock_uuid = Gitlab::ExclusiveLease + .new(write_lock_key, timeout: 1.hour.to_i).try_obtain + + raise PermissionError, 'Already opened by another process' unless write_lock_uuid + end + + super + end + + def close + if write_lock_uuid + Gitlab::ExclusiveLease.cancel(write_lock_key, write_lock_uuid) + end + + super + end + + def check_lock! + raise PermissionError, 'Could not write without lock' unless write_lock_uuid + end + + def can_read! + raise IOError, 'not opened for reading' unless /(r|+)/ =~ mode + end + + def can_write! 
+ raise IOError, 'not opened for writing' unless /(w|a)/ =~ mode + end + + def write_lock_key + "live_trace:operation:write:#{job_id}" + end + end + end + end + end + end +end diff --git a/lib/gitlab/ci/trace/chunked_file/live_trace.rb b/lib/gitlab/ci/trace/chunked_file/live_trace.rb index 264bb98ef6c2..5502a6a52366 100644 --- a/lib/gitlab/ci/trace/chunked_file/live_trace.rb +++ b/lib/gitlab/ci/trace/chunked_file/live_trace.rb @@ -10,42 +10,38 @@ def exist?(job_id) end end + after_callback :write_chunk, :stash_to_database + def initialize(job_id, mode) super(job_id, calculate_size(job_id), mode) end - def write(data) - raise NotImplementedError, 'Overwrite is not supported' unless tell == size - - super(data) do |store| - if store.filled? - # Once data is filled into redis, move the data to database - ChunkStore::Database.open(job_id, chunk_index, params_for_store) do |to_store| - to_store.write!(store.get) - store.delete! - end + def stash_to_database(store) + # Once data is filled into redis, move the data to database + if store.filled? && + ChunkStore::Database.open(job_id, chunk_index, params_for_store) do |to_store| + to_store.write!(store.get) + store.delete! end end end + # Efficient process than iterating each def truncate(offset) - super(offset) do |store| - next if chunk_index == 0 - - prev_chunk_index = chunk_index - 1 - - if ChunkStore::Database.exist?(job_id, prev_chunk_index) - # Swap data from Database to Redis to truncate any size than buffer_size - ChunkStore::Database.open(job_id, prev_chunk_index, params_for_store(prev_chunk_index)) do |from_store| - ChunkStore::Redis.open(job_id, prev_chunk_index, params_for_store(prev_chunk_index)) do |to_store| - to_store.write!(from_store.get) - from_store.delete! - end - end - end + if truncate == 0 + self.delete_all(job_id) + elsif offset == size + # no-op + else + raise NotImplementedError, 'Unexpected operation' end end + def delete + ChunkStores::Redis.delete_all(job_id) + ChunkStores::Database.delete_all(job_id) + end + private def calculate_size(job_id) -- GitLab From 3a87bfb29659004d39a03d8116fa65a3f78cae93 Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Mon, 2 Apr 2018 04:53:39 +0900 Subject: [PATCH 13/86] Add lock for delete --- lib/gitlab/ci/trace.rb | 2 +- lib/gitlab/ci/trace/chunked_file/chunked_io.rb | 6 +++++- lib/gitlab/ci/trace/chunked_file/concerns/permissions.rb | 3 ++- lib/gitlab/ci/trace/chunked_file/live_trace.rb | 6 +++++- 4 files changed, 13 insertions(+), 4 deletions(-) diff --git a/lib/gitlab/ci/trace.rb b/lib/gitlab/ci/trace.rb index 83ed58f48454..c4000cc787af 100644 --- a/lib/gitlab/ci/trace.rb +++ b/lib/gitlab/ci/trace.rb @@ -110,7 +110,7 @@ def archive! raise ArchiveError, 'Job is not finished yet' unless job.complete? if ChunkedFile::LiveTrace.exist?(job.id) - ChunkedFile::LiveTrace.open(job.id, "wb") do |stream| + ChunkedFile::LiveTrace.open(job.id, 'a+b') do |stream| archive_stream!(stream) stream.delete end diff --git a/lib/gitlab/ci/trace/chunked_file/chunked_io.rb b/lib/gitlab/ci/trace/chunked_file/chunked_io.rb index d23fe2a47d51..8ed9082d6770 100644 --- a/lib/gitlab/ci/trace/chunked_file/chunked_io.rb +++ b/lib/gitlab/ci/trace/chunked_file/chunked_io.rb @@ -128,7 +128,11 @@ def flush end def present? 
- true + chunk_store.chunks_count(job_id) > 0 + end + + def delete + chunk_store.delete_all end private diff --git a/lib/gitlab/ci/trace/chunked_file/concerns/permissions.rb b/lib/gitlab/ci/trace/chunked_file/concerns/permissions.rb index f87039704666..f364237f07ba 100644 --- a/lib/gitlab/ci/trace/chunked_file/concerns/permissions.rb +++ b/lib/gitlab/ci/trace/chunked_file/concerns/permissions.rb @@ -21,6 +21,7 @@ module Permissions # write_lock before_method :write, :check_lock! before_method :truncate, :check_lock! + before_method :delete, :check_lock! end def initialize(job_id, size, mode = 'rb') @@ -43,7 +44,7 @@ def close end def check_lock! - raise PermissionError, 'Could not write without lock' unless write_lock_uuid + raise PermissionError, 'Could not modify the file without lock' unless write_lock_uuid end def can_read! diff --git a/lib/gitlab/ci/trace/chunked_file/live_trace.rb b/lib/gitlab/ci/trace/chunked_file/live_trace.rb index 5502a6a52366..9b7c187d3d53 100644 --- a/lib/gitlab/ci/trace/chunked_file/live_trace.rb +++ b/lib/gitlab/ci/trace/chunked_file/live_trace.rb @@ -29,7 +29,7 @@ def stash_to_database(store) # Efficient process than iterating each def truncate(offset) if truncate == 0 - self.delete_all(job_id) + delete elsif offset == size # no-op else @@ -37,6 +37,10 @@ def truncate(offset) end end + def present? + self.exist?(job_id) + end + def delete ChunkStores::Redis.delete_all(job_id) ChunkStores::Database.delete_all(job_id) -- GitLab From 3a0f90031863372c7935f15ea43640df6cbe5072 Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Mon, 2 Apr 2018 05:03:32 +0900 Subject: [PATCH 14/86] Remove OS changes --- .../chunk_store/object_storage.rb | 84 ------------------- lib/gitlab/ci/trace/chunked_file/http_io.rb | 45 ---------- 2 files changed, 129 deletions(-) delete mode 100644 lib/gitlab/ci/trace/chunked_file/chunk_store/object_storage.rb delete mode 100644 lib/gitlab/ci/trace/chunked_file/http_io.rb diff --git a/lib/gitlab/ci/trace/chunked_file/chunk_store/object_storage.rb b/lib/gitlab/ci/trace/chunked_file/chunk_store/object_storage.rb deleted file mode 100644 index 95aaa9f9e2cb..000000000000 --- a/lib/gitlab/ci/trace/chunked_file/chunk_store/object_storage.rb +++ /dev/null @@ -1,84 +0,0 @@ -module Gitlab - module Ci - class Trace - module ChunkedFile - module ChunkStore - class ObjectStorage < Base - class << self - def open(job_id, chunk_index, **params) - raise ArgumentError unless job_id && chunk_index - - relative_path = relative_path(job_id, chunk_index) - store = self.new(relative_path, params) - - yield store - ensure - store&.close - end - - def exist?(job_id, chunk_index) - raise NotImplementedError - end - - def chunks_count(job_id) - raise NotImplementedError - end - - def relative_path(job_id, chunk_index) - "#{job_id}/#{chunk_index}.chunk" - end - end - - FailedToGetChunkError = Class.new(StandardError) - - attr_reader :relative_path - - def initialize(relative_path, **params) - super - - @relative_path = relative_path - end - - def close - @relative_path = nil - end - - ## TODO: Carrierwave::Fog integration - def get - response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: uri.scheme == 'https') do |http| - request = Net::HTTP::Get.new(uri) - request.set_range(chunk_start, buffer_size) - http.request(request) - end - - raise FailedToGetChunkError unless response.code == '200' || response.code == '206' - - response.body.force_encoding(Encoding::BINARY) - end - - def size - raise NotImplementedError - end - - def write!(data) - raise 
NotImplementedError, 'Partial write is not supported' unless buffer_size == data.length - raise NotImplementedError - end - - def append!(data) - raise NotImplementedError - end - - def truncate!(offset) - raise NotImplementedError - end - - def delete! - raise NotImplementedError - end - end - end - end - end - end -end diff --git a/lib/gitlab/ci/trace/chunked_file/http_io.rb b/lib/gitlab/ci/trace/chunked_file/http_io.rb deleted file mode 100644 index 32b2224a18b3..000000000000 --- a/lib/gitlab/ci/trace/chunked_file/http_io.rb +++ /dev/null @@ -1,45 +0,0 @@ -module Gitlab - module Ci - class Trace - module ChunkedFile - class HttpIO < ChunkedIO - BUFFER_SIZE = 128.kilobytes - - InvalidURLError = Class.new(StandardError) - - attr_reader :uri - - def initialize(job_id, url, size, mode) - raise InvalidURLError unless ::Gitlab::UrlSanitizer.valid?(url) - - @uri = URI(url) - - super(job_id, size, mode) - end - - def write(data) - raise NotImplementedError - end - - def truncate(offset) - raise NotImplementedError - end - - def flush - raise NotImplementedError - end - - private - - def chunk_store - ChunkStores::ObjectStorage - end - - def params_for_store - super.merge( { uri: uri } ) - end - end - end - end - end -end -- GitLab From ebf69adc210423dc73c1a87c6a351b56a2772b15 Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Mon, 2 Apr 2018 05:22:22 +0900 Subject: [PATCH 15/86] Optimize chunk_store selector --- lib/gitlab/ci/trace/chunked_file/chunked_io.rb | 2 +- lib/gitlab/ci/trace/chunked_file/live_trace.rb | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/lib/gitlab/ci/trace/chunked_file/chunked_io.rb b/lib/gitlab/ci/trace/chunked_file/chunked_io.rb index 8ed9082d6770..5a2c56a687b1 100644 --- a/lib/gitlab/ci/trace/chunked_file/chunked_io.rb +++ b/lib/gitlab/ci/trace/chunked_file/chunked_io.rb @@ -236,7 +236,7 @@ def first_chunk? end def last_chunk? - chunks_count == 0 || chunk_index == (chunks_count - 1) + (chunk_start...chunk_end).include?(tell) end def chunk_store diff --git a/lib/gitlab/ci/trace/chunked_file/live_trace.rb b/lib/gitlab/ci/trace/chunked_file/live_trace.rb index 9b7c187d3d53..6976686553ff 100644 --- a/lib/gitlab/ci/trace/chunked_file/live_trace.rb +++ b/lib/gitlab/ci/trace/chunked_file/live_trace.rb @@ -18,7 +18,7 @@ def initialize(job_id, mode) def stash_to_database(store) # Once data is filled into redis, move the data to database - if store.filled? && + if store.filled? ChunkStore::Database.open(job_id, chunk_index, params_for_store) do |to_store| to_store.write!(store.get) store.delete! @@ -28,7 +28,7 @@ def stash_to_database(store) # Efficient process than iterating each def truncate(offset) - if truncate == 0 + if offset == 0 delete elsif offset == size # no-op @@ -54,7 +54,7 @@ def calculate_size(job_id) end def chunk_store - if last_chunk? + if last_chunk? || eof? 
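# Rough picture of the selector below: only the trailing, still-growing
# chunk lives in Redis; every filled chunk has been stashed to the
# database. With buffer_size = 128.kilobytes and size = 300_000:
#
#   chunks 0..1 (bytes 0...262_144)       -> ChunkStore::Database
#   chunk  2    (bytes 262_144...300_000) -> ChunkStore::Redis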
 ChunkStore::Redis
 else
 ChunkStore::Database
-- 
GitLab


From 3a99a6b9033b669d062e7aa63a680c86240fac6e Mon Sep 17 00:00:00 2001
From: Shinya Maeda
Date: Mon, 2 Apr 2018 16:52:18 +0900
Subject: [PATCH 16/86] Consolidate ChunkedIO

---
 .../ci/trace/chunked_file/chunk_store/base.rb | 14 +
 .../chunked_file/chunk_store/database.rb | 10 +-
 .../trace/chunked_file/chunk_store/redis.rb | 39 +-
 .../ci/trace/chunked_file/chunked_io.rb | 32 +-
 .../ci/trace/chunked_file/concerns/hooks.rb | 63 ----
 .../chunked_file/concerns/permissions.rb | 69 +++-
 .../ci/trace/chunked_file/live_trace.rb | 4 -
 .../chunked_file/chunk_store/database_spec.rb | 95 +++--
 .../chunked_file/chunk_store/redis_spec.rb | 147 ++++----
 .../ci/trace/chunked_file/chunked_io_spec.rb | 3 +-
 spec/support/chunked_io/chunked_io_helpers.rb | 34 +-
 .../chunked_io_shared_examples.rb | 353 ++++++++----------
 12 files changed, 427 insertions(+), 436 deletions(-)
 delete mode 100644 lib/gitlab/ci/trace/chunked_file/concerns/hooks.rb

diff --git a/lib/gitlab/ci/trace/chunked_file/chunk_store/base.rb b/lib/gitlab/ci/trace/chunked_file/chunk_store/base.rb
index e2645918a40c..6e104a6d7641 100644
--- a/lib/gitlab/ci/trace/chunked_file/chunk_store/base.rb
+++ b/lib/gitlab/ci/trace/chunked_file/chunk_store/base.rb
@@ -22,18 +22,32 @@ def size
 raise NotImplementedError
 end

+ # Write data to the chunk store. Always overwrites.
+ #
+ # @param [String] data
+ # @return [Fixnum] length of the data after writing
 def write!(data)
 raise NotImplementedError
 end

+ # Append data to the chunk store
+ #
+ # @param [String] data
+ # @return [Fixnum] length of the appended data
 def append!(data)
 raise NotImplementedError
 end

+ # Truncate data in the chunk store
+ #
+ # @param [Fixnum] offset
 def truncate!(offset)
 raise NotImplementedError
 end

+ # Delete data from the chunk store
+ #
+ # Removes the chunk entirely, rather than truncating it.
 def delete!
 raise NotImplementedError
 end
diff --git a/lib/gitlab/ci/trace/chunked_file/chunk_store/database.rb b/lib/gitlab/ci/trace/chunked_file/chunk_store/database.rb
index a7db214f4285..3c2805e83f74 100644
--- a/lib/gitlab/ci/trace/chunked_file/chunk_store/database.rb
+++ b/lib/gitlab/ci/trace/chunked_file/chunk_store/database.rb
@@ -48,6 +48,8 @@ def close
 end

 def get
+ puts "#{self.class.name} - #{__callee__}: params[:chunk_index]: #{params[:chunk_index]}"
+
 job_trace_chunk.data
 end

@@ -56,9 +58,10 @@ def size
 end

 def write!(data)
+ raise NotImplementedError, 'Partial writing is not supported' unless params[:buffer_size] == data&.length
+ raise NotImplementedError, 'UPDATE (Overwriting data) is not supported' if job_trace_chunk.data
+
 puts "#{self.class.name} - #{__callee__}: data.length: #{data.length.inspect} params[:chunk_index]: #{params[:chunk_index]}"
- raise NotImplementedError, 'Partial write is not supported' unless params[:buffer_size] == data&.length
- raise NotImplementedError, 'UPDATE is not supported' if job_trace_chunk.data

 job_trace_chunk.data = data
 job_trace_chunk.save!
@@ -75,7 +78,10 @@ def truncate!(offset)
 end

 def delete!
+ raise ActiveRecord::RecordNotFound, 'Could not find deletable record' unless job_trace_chunk.persisted?
+
 puts "#{self.class.name} - #{__callee__}: params[:chunk_index]: #{params[:chunk_index]}"
+
 job_trace_chunk.destroy!
end end diff --git a/lib/gitlab/ci/trace/chunked_file/chunk_store/redis.rb b/lib/gitlab/ci/trace/chunked_file/chunk_store/redis.rb index cb45cd5fba54..c87275319d95 100644 --- a/lib/gitlab/ci/trace/chunked_file/chunk_store/redis.rb +++ b/lib/gitlab/ci/trace/chunked_file/chunk_store/redis.rb @@ -51,6 +51,9 @@ def buffer_key(job_id, chunk_index) end end + BufferKeyNotFoundError = Class.new(StandardError) + WriteError = Class.new(StandardError) + attr_reader :buffer_key def initialize(buffer_key, **params) @@ -64,6 +67,8 @@ def close end def get + puts "#{self.class.name} - #{__callee__}: params[:chunk_index]: #{params[:chunk_index]}" + Gitlab::Redis::Cache.with do |redis| redis.get(buffer_key) end @@ -76,35 +81,47 @@ def size end def write!(data) + raise ArgumentError, 'Could not write empty data' unless data.present? + puts "#{self.class.name} - #{__callee__}: data.length: #{data.length.inspect} params[:chunk_index]: #{params[:chunk_index]}" Gitlab::Redis::Cache.with do |redis| - redis.set(buffer_key, data) + unless redis.set(buffer_key, data) == 'OK' + raise WriteError, 'Failed to write' + end + redis.strlen(buffer_key) end end def append!(data) + raise ArgumentError, 'Could not write empty data' unless data.present? + puts "#{self.class.name} - #{__callee__}: data.length: #{data.length.inspect} params[:chunk_index]: #{params[:chunk_index]}" Gitlab::Redis::Cache.with do |redis| - redis.append(buffer_key, data) - data.length + raise BufferKeyNotFoundError, 'Buffer key is not found' unless redis.exists(buffer_key) + + original_size = size + new_size = redis.append(buffer_key, data) + appended_size = new_size - original_size + + raise WriteError, 'Failed to append' unless appended_size == data.length + + appended_size end end def truncate!(offset) - puts "#{self.class.name} - #{__callee__}: offset: #{offset.inspect} params[:chunk_index]: #{params[:chunk_index]}" - Gitlab::Redis::Cache.with do |redis| - return 0 unless redis.exists(buffer_key) - - truncated_data = redis.getrange(buffer_key, 0, offset) - redis.set(buffer_key, truncated_data) - end + raise NotImplementedError end def delete! puts "#{self.class.name} - #{__callee__}: params[:chunk_index]: #{params[:chunk_index]}" Gitlab::Redis::Cache.with do |redis| - redis.del(buffer_key) + raise BufferKeyNotFoundError, 'Buffer key is not found' unless redis.exists(buffer_key) + + unless redis.del(buffer_key) == 1 + raise WriteError, 'Failed to delete' + end end end end diff --git a/lib/gitlab/ci/trace/chunked_file/chunked_io.rb b/lib/gitlab/ci/trace/chunked_file/chunked_io.rb index 5a2c56a687b1..f28955264c86 100644 --- a/lib/gitlab/ci/trace/chunked_file/chunked_io.rb +++ b/lib/gitlab/ci/trace/chunked_file/chunked_io.rb @@ -8,7 +8,7 @@ module Ci class Trace module ChunkedFile class ChunkedIO - extend ChunkedFile::Concerns::Opener + # extend ChunkedFile::Concerns::Opener include ChunkedFile::Concerns::Errors include ChunkedFile::Concerns::Hooks include ChunkedFile::Concerns::Callbacks @@ -22,13 +22,21 @@ class ChunkedIO alias_method :pos, :tell - def initialize(job_id, size, mode = 'rb') - @size = size + def initialize(job_id, size = nil, mode = 'rb', &block) + raise NotImplementedError, "Mode 'w' is not supported" if mode.include?('w') + + @size = size || calculate_size(job_id) @tell = 0 @job_id = job_id @mode = mode - raise NotImplementedError, "Mode 'w' is not supported" if mode.include?('w') + if block_given? + begin + yield self + ensure + self.close + end + end end def close @@ -128,7 +136,7 @@ def flush end def present? 
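# Note on the append! verification earlier in this patch: redis-rb's
# APPEND returns the new total length of the value, not the number of
# bytes appended, hence appended_size = new_size - original_size. Sketch:
#
#   redis.set('key', 'ab')     # => "OK"
#   redis.append('key', 'cd')  # => 4 (total length after append)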
- chunk_store.chunks_count(job_id) > 0 + chunks_count > 0 end def delete @@ -177,19 +185,21 @@ def write_as_append(data) end def write_chunk(data) + written_size = 0 + chunk_store.open(job_id, chunk_index, params_for_store) do |store| with_callbacks(:write_chunk, store) do - written_size = if buffer_size == data.length + written_size = if buffer_size == data.length || store.size == 0 store.write!(data) else store.append!(data) end raise WriteError, 'Written size mismatch' unless data.length == written_size - - written_size end end + + written_size end def truncate_chunk(offset) @@ -228,7 +238,7 @@ def chunk_index end def chunks_count - (size / buffer_size) + (size / buffer_size.to_f).ceil end def first_chunk? @@ -246,6 +256,10 @@ def chunk_store def buffer_size raise NotImplementedError end + + def calculate_size(job_id) + chunk_store.chunks_size(job_id) + end end end end diff --git a/lib/gitlab/ci/trace/chunked_file/concerns/hooks.rb b/lib/gitlab/ci/trace/chunked_file/concerns/hooks.rb deleted file mode 100644 index 290a3a15805e..000000000000 --- a/lib/gitlab/ci/trace/chunked_file/concerns/hooks.rb +++ /dev/null @@ -1,63 +0,0 @@ -module Gitlab - module Ci - class Trace - module ChunkedFile - module Concerns - module Hooks - extend ActiveSupport::Concern - - included do - class_attribute :_before_methods, :_after_methods, - :instance_writer => false - self._before_methods = Hash.new [] - self._after_methods = Hash.new [] - end - - class_methods do - def before_method(kind, callback) - self._before_methods = self._before_methods. - merge kind => _before_methods[kind] + [callback] - end - - def after_method(kind, callback) - self._after_methods = self._after_methods. - merge kind => _after_methods[kind] + [callback] - end - end - - def method_added(method_name) - return if self.class._before_methods.values.include?(method_name) - return if self.class._after_methods.values.include?(method_name) - return if hooked_methods.include?(method_name) - - add_hooks_to(method_name) - end - - private - - def hooked_methods - @hooked_methods ||= [] - end - - def add_hooks_to(method_name) - hooked_methods << method_name - - original_method = instance_method(method_name) - - # re-define the method, but notice how we reference the original - # method definition - define_method(method_name) do |*args, &block| - self.class._before_methods[method_name].each { |hook| method(hook).call } - - # now invoke the original method - original_method.bind(self).call(*args, &block).tap do - self.class._after_methods[method_name].each { |hook| method(hook).call } - end - end - end - end - end - end - end - end -end diff --git a/lib/gitlab/ci/trace/chunked_file/concerns/permissions.rb b/lib/gitlab/ci/trace/chunked_file/concerns/permissions.rb index f364237f07ba..11d8fd0cdfc9 100644 --- a/lib/gitlab/ci/trace/chunked_file/concerns/permissions.rb +++ b/lib/gitlab/ci/trace/chunked_file/concerns/permissions.rb @@ -6,28 +6,19 @@ module Concerns module Permissions extend ActiveSupport::Concern + WRITABLE_MODE = %w[a] + READABLE_MODE = %w[r +] + included do PermissionError = Class.new(StandardError) attr_reader :write_lock_uuid - - # mode checks - before_method :read, :can_read! - before_method :readline, :can_read! - before_method :each_line, :can_read! - before_method :write, :can_write! - before_method :truncate, :can_write! - - # write_lock - before_method :write, :check_lock! - before_method :truncate, :check_lock! - before_method :delete, :check_lock! 
end def initialize(job_id, size, mode = 'rb') - if /(w|a)/ =~ mode + if WRITABLE_MODE.any? { |m| mode.include?(m) } @write_lock_uuid = Gitlab::ExclusiveLease - .new(write_lock_key, timeout: 1.hour.to_i).try_obtain + .new(write_lock_key(job_id), timeout: 1.hour.to_i).try_obtain raise PermissionError, 'Already opened by another process' unless write_lock_uuid end @@ -37,25 +28,63 @@ def initialize(job_id, size, mode = 'rb') def close if write_lock_uuid - Gitlab::ExclusiveLease.cancel(write_lock_key, write_lock_uuid) + Gitlab::ExclusiveLease.cancel(write_lock_key(job_id), write_lock_uuid) end super end - def check_lock! - raise PermissionError, 'Could not modify the file without lock' unless write_lock_uuid + def read(*args) + can_read! + + super + end + + def readline(*args) + can_read! + + super + end + + def each_line(*args) + can_read! + + super end + def write(*args) + can_write! + + super + end + + def truncate(*args) + can_write! + + super + end + + def delete(*args) + can_write! + + super + end + + private + def can_read! - raise IOError, 'not opened for reading' unless /(r|+)/ =~ mode + unless READABLE_MODE.any? { |m| mode.include?(m) } + raise IOError, 'not opened for reading' + end end def can_write! - raise IOError, 'not opened for writing' unless /(w|a)/ =~ mode + unless WRITABLE_MODE.any? { |m| mode.include?(m) } + raise IOError, 'not opened for writing' + end end - def write_lock_key + def write_lock_key(job_id) "live_trace:operation:write:#{job_id}" end end diff --git a/lib/gitlab/ci/trace/chunked_file/live_trace.rb b/lib/gitlab/ci/trace/chunked_file/live_trace.rb index 6976686553ff..fe2205144cb0 100644 --- a/lib/gitlab/ci/trace/chunked_file/live_trace.rb +++ b/lib/gitlab/ci/trace/chunked_file/live_trace.rb @@ -12,10 +12,6 @@ def exist?(job_id) after_callback :write_chunk, :stash_to_database - def initialize(job_id, mode) - super(job_id, calculate_size(job_id), mode) - end - def stash_to_database(store) # Once data is filled into redis, move the data to database if store.filled? 
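Taken together with the callback registration above, the life cycle of a
live-trace chunk is: appends accumulate in Redis, and once a chunk reaches
buffer_size the :write_chunk after-callback copies it into the database
store and drops the Redis buffer. A hedged sketch (params as built by
params_for_store, filled? as defined on the chunk stores):

  ChunkStore::Redis.open(job_id, chunk_index, params) do |store|
    store.filled?  # => true once the chunk holds buffer_size bytes
    # stash_to_database then performs:
    #   ChunkStore::Database.open(job_id, chunk_index, params) do |to_store|
    #     to_store.write!(store.get)
    #     store.delete!
    #   end
  end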
diff --git a/spec/lib/gitlab/ci/trace/chunked_file/chunk_store/database_spec.rb b/spec/lib/gitlab/ci/trace/chunked_file/chunk_store/database_spec.rb index 74fb81d7a535..c84398ca4818 100644 --- a/spec/lib/gitlab/ci/trace/chunked_file/chunk_store/database_spec.rb +++ b/spec/lib/gitlab/ci/trace/chunked_file/chunk_store/database_spec.rb @@ -1,13 +1,13 @@ require 'spec_helper' describe Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Database do + let(:job) { create(:ci_build) } let(:job_id) { job.id } let(:chunk_index) { 0 } let(:buffer_size) { 256 } let(:job_trace_chunk) { ::Ci::JobTraceChunk.new(job_id: job_id, chunk_index: chunk_index) } let(:params) { { buffer_size: buffer_size } } - let(:trace) { 'A' * buffer_size } - let(:job) { create(:ci_build) } + let(:data) { 'A' * buffer_size } describe '.open' do subject { described_class.open(job_id, chunk_index, params) } @@ -35,7 +35,7 @@ context 'when job_trace_chunk exists' do before do - described_class.new(job_trace_chunk, params).write!(trace) + described_class.new(job_trace_chunk, params).write!(data) end it { is_expected.to be_truthy } @@ -51,17 +51,17 @@ context 'when job_trace_chunk exists' do before do - described_class.new(job_trace_chunk, params).write!(trace) + described_class.new(job_trace_chunk, params).write!(data) end it { is_expected.to eq(1) } context 'when two chunks exists' do let(:job_trace_chunk_2) { ::Ci::JobTraceChunk.new(job_id: job_id, chunk_index: chunk_index + 1) } - let(:trace_2) { 'B' * buffer_size } + let(:data_2) { 'B' * buffer_size } before do - described_class.new(job_trace_chunk_2, params).write!(trace_2) + described_class.new(job_trace_chunk_2, params).write!(data_2) end it { is_expected.to eq(2) } @@ -78,18 +78,18 @@ context 'when job_trace_chunk exists' do before do - described_class.new(job_trace_chunk, params).write!(trace) + described_class.new(job_trace_chunk, params).write!(data) end - it { is_expected.to eq(trace.length) } + it { is_expected.to eq(data.length) } context 'when two chunks exists' do let(:job_trace_chunk_2) { ::Ci::JobTraceChunk.new(job_id: job_id, chunk_index: chunk_index + 1) } - let(:trace_2) { 'B' * buffer_size } - let(:chunks_size) { trace.length + trace_2.length } + let(:data_2) { 'B' * buffer_size } + let(:chunks_size) { data.length + data_2.length } before do - described_class.new(job_trace_chunk_2, params).write!(trace_2) + described_class.new(job_trace_chunk_2, params).write!(data_2) end it { is_expected.to eq(chunks_size) } @@ -101,15 +101,48 @@ end end + describe '.delete_all' do + subject { described_class.delete_all(job_id) } + + context 'when job_trace_chunk exists' do + before do + described_class.new(job_trace_chunk, params).write!(data) + end + + it 'deletes all' do + expect { subject }.to change { described_class.chunks_count(job_id) }.by(-1) + end + + context 'when two chunks exists' do + let(:job_trace_chunk_2) { ::Ci::JobTraceChunk.new(job_id: job_id, chunk_index: chunk_index + 1) } + let(:data_2) { 'B' * buffer_size } + + before do + described_class.new(job_trace_chunk_2, params).write!(data_2) + end + + it 'deletes all' do + expect { subject }.to change { described_class.chunks_count(job_id) }.by(-2) + end + end + end + + context 'when buffer_key does not exist' do + it 'deletes all' do + expect { subject }.not_to change { described_class.chunks_count(job_id) } + end + end + end + describe '#get' do subject { described_class.new(job_trace_chunk, params).get } context 'when job_trace_chunk exists' do before do - described_class.new(job_trace_chunk, params).write!(trace) 
+ described_class.new(job_trace_chunk, params).write!(data) end - it { is_expected.to eq(trace) } + it { is_expected.to eq(data) } end context 'when job_trace_chunk does not exist' do @@ -122,10 +155,10 @@ context 'when job_trace_chunk exists' do before do - described_class.new(job_trace_chunk, params).write!(trace) + described_class.new(job_trace_chunk, params).write!(data) end - it { is_expected.to eq(trace.length) } + it { is_expected.to eq(data.length) } end context 'when job_trace_chunk does not exist' do @@ -134,45 +167,39 @@ end describe '#write!' do - subject { described_class.new(job_trace_chunk, params).write!(trace) } + subject { described_class.new(job_trace_chunk, params).write!(data) } context 'when job_trace_chunk exists' do before do - described_class.new(job_trace_chunk, params).write!(trace) + described_class.new(job_trace_chunk, params).write!(data) end - it { expect { subject }.to raise_error('UPDATE is not supported') } + it { expect { subject }.to raise_error('UPDATE (Overwriting data) is not supported') } end context 'when job_trace_chunk does not exist' do let(:expected_data) { ::Ci::JobTraceChunk.find_by(job_id: job_id, chunk_index: chunk_index).data } it 'writes' do - is_expected.to eq(trace.length) + is_expected.to eq(data.length) - expect(expected_data).to eq(trace) + expect(expected_data).to eq(data) end end context 'when data is nil' do - let(:trace) { nil } + let(:data) { nil } - it { expect { subject }.to raise_error('Partial write is not supported') } + it { expect { subject }.to raise_error('Partial writing is not supported') } end end - describe '#truncate!' do - subject { described_class.new(job_trace_chunk, params).truncate!(0) } - - it { expect { subject }.to raise_error(NotImplementedError) } - end - describe '#delete!' do subject { described_class.new(job_trace_chunk, params).delete! 
} context 'when job_trace_chunk exists' do before do - described_class.new(job_trace_chunk, params).write!(trace) + described_class.new(job_trace_chunk, params).write!(data) end it 'deletes' do @@ -187,14 +214,8 @@ end context 'when job_trace_chunk does not exist' do - it 'deletes' do - expect(::Ci::JobTraceChunk.exists?(job_id: job_id, chunk_index: chunk_index)) - .to be_falsy - - subject - - expect(::Ci::JobTraceChunk.exists?(job_id: job_id, chunk_index: chunk_index)) - .to be_falsy + it 'raises an error' do + expect { subject }.to raise_error('Could not find deletable record') end end end diff --git a/spec/lib/gitlab/ci/trace/chunked_file/chunk_store/redis_spec.rb b/spec/lib/gitlab/ci/trace/chunked_file/chunk_store/redis_spec.rb index 83423ac2a33b..f1fb64225c90 100644 --- a/spec/lib/gitlab/ci/trace/chunked_file/chunk_store/redis_spec.rb +++ b/spec/lib/gitlab/ci/trace/chunked_file/chunk_store/redis_spec.rb @@ -1,12 +1,13 @@ require 'spec_helper' describe Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis, :clean_gitlab_redis_cache do - let(:job_id) { 1 } + let(:job) { create(:ci_build) } + let(:job_id) { job.id } let(:chunk_index) { 0 } let(:buffer_size) { 128.kilobytes } let(:buffer_key) { described_class.buffer_key(job_id, chunk_index) } let(:params) { { buffer_size: buffer_size } } - let(:trace) { 'Here is the trace' } + let(:data) { 'Here is the trace' } describe '.open' do subject { described_class.open(job_id, chunk_index, params) } @@ -34,7 +35,7 @@ context 'when buffer_key exists' do before do - described_class.new(buffer_key, params).write!(trace) + described_class.new(buffer_key, params).write!(data) end it { is_expected.to be_truthy } @@ -50,17 +51,17 @@ context 'when buffer_key exists' do before do - described_class.new(buffer_key, params).write!(trace) + described_class.new(buffer_key, params).write!(data) end it { is_expected.to eq(1) } context 'when two chunks exists' do let(:buffer_key_2) { described_class.buffer_key(job_id, chunk_index + 1) } - let(:trace_2) { 'Another trace' } + let(:data_2) { 'Another data' } before do - described_class.new(buffer_key_2, params).write!(trace_2) + described_class.new(buffer_key_2, params).write!(data_2) end it { is_expected.to eq(2) } @@ -77,18 +78,18 @@ context 'when buffer_key exists' do before do - described_class.new(buffer_key, params).write!(trace) + described_class.new(buffer_key, params).write!(data) end - it { is_expected.to eq(trace.length) } + it { is_expected.to eq(data.length) } context 'when two chunks exists' do let(:buffer_key_2) { described_class.buffer_key(job_id, chunk_index + 1) } - let(:trace_2) { 'Another trace' } - let(:chunks_size) { trace.length + trace_2.length } + let(:data_2) { 'Another data' } + let(:chunks_size) { data.length + data_2.length } before do - described_class.new(buffer_key_2, params).write!(trace_2) + described_class.new(buffer_key_2, params).write!(data_2) end it { is_expected.to eq(chunks_size) } @@ -100,6 +101,39 @@ end end + describe '.delete_all' do + subject { described_class.delete_all(job_id) } + + context 'when buffer_key exists' do + before do + described_class.new(buffer_key, params).write!(data) + end + + it 'deletes all' do + expect { subject }.to change { described_class.chunks_count(job_id) }.by(-1) + end + + context 'when two chunks exists' do + let(:buffer_key_2) { described_class.buffer_key(job_id, chunk_index + 1) } + let(:data_2) { 'Another data' } + + before do + described_class.new(buffer_key_2, params).write!(data_2) + end + + it 'deletes all' do + expect { subject }.to 
change { described_class.chunks_count(job_id) }.by(-2) + end + end + end + + context 'when buffer_key does not exist' do + it 'deletes all' do + expect { subject }.not_to change { described_class.chunks_count(job_id) } + end + end + end + describe '.buffer_key' do subject { described_class.buffer_key(job_id, chunk_index) } @@ -111,10 +145,10 @@ context 'when buffer_key exists' do before do - described_class.new(buffer_key, params).write!(trace) + described_class.new(buffer_key, params).write!(data) end - it { is_expected.to eq(trace) } + it { is_expected.to eq(data) } end context 'when buffer_key does not exist' do @@ -127,10 +161,10 @@ context 'when buffer_key exists' do before do - described_class.new(buffer_key, params).write!(trace) + described_class.new(buffer_key, params).write!(data) end - it { is_expected.to eq(trace.length) } + it { is_expected.to eq(data.length) } end context 'when buffer_key does not exist' do @@ -139,91 +173,72 @@ end describe '#write!' do - subject { described_class.new(buffer_key, params).write!(trace) } + subject { described_class.new(buffer_key, params).write!(data) } context 'when buffer_key exists' do before do - described_class.new(buffer_key, params).write!('Already data in the chunk') + described_class.new(buffer_key, params).write!('Already data in the data') end it 'overwrites' do - is_expected.to eq(trace.length) + is_expected.to eq(data.length) Gitlab::Redis::Cache.with do |redis| - expect(redis.get(buffer_key)).to eq(trace) + expect(redis.get(buffer_key)).to eq(data) end end end context 'when buffer_key does not exist' do it 'writes' do - is_expected.to eq(trace.length) + is_expected.to eq(data.length) Gitlab::Redis::Cache.with do |redis| - expect(redis.get(buffer_key)).to eq(trace) + expect(redis.get(buffer_key)).to eq(data) end end end context 'when data is nil' do - let(:trace) { nil } + let(:data) { nil } it 'clears value' do - is_expected.to eq(0) + expect { described_class.new(buffer_key, params).write!(data) } + .to raise_error('Could not write empty data') end end end - describe '#truncate!' do - subject { described_class.new(buffer_key, params).truncate!(offset) } - - let(:offset) { 5 } + describe '#append!' 
do + subject { described_class.new(buffer_key, params).append!(data) } context 'when buffer_key exists' do + let(:written_chunk) { 'Already data in the data' } + before do - described_class.new(buffer_key, params).write!(trace) + described_class.new(buffer_key, params).write!(written_chunk) end - it 'truncates' do - Gitlab::Redis::Cache.with do |redis| - expect(redis.get(buffer_key)).to eq(trace) - end - - subject + it 'appends' do + is_expected.to eq(data.length) Gitlab::Redis::Cache.with do |redis| - expect(redis.get(buffer_key)).to eq(trace.slice(0..offset)) - end - end - - context 'when offset is larger than data size' do - let(:offset) { 100 } - - it 'truncates' do - Gitlab::Redis::Cache.with do |redis| - expect(redis.get(buffer_key)).to eq(trace) - end - - subject - - Gitlab::Redis::Cache.with do |redis| - expect(redis.get(buffer_key)).to eq(trace.slice(0..offset)) - end + expect(redis.get(buffer_key)).to eq(written_chunk + data) end end end context 'when buffer_key does not exist' do - it 'truncates' do - Gitlab::Redis::Cache.with do |redis| - expect(redis.get(buffer_key)).to be_nil - end + it 'raises an error' do + expect { subject }.to raise_error(described_class::BufferKeyNotFoundError) + end + end - subject + context 'when data is nil' do + let(:data) { nil } - Gitlab::Redis::Cache.with do |redis| - expect(redis.get(buffer_key)).to be_nil - end + it 'raises an error' do + expect { subject }.to raise_error('Could not write empty data') end end end @@ -233,7 +248,7 @@ context 'when buffer_key exists' do before do - described_class.new(buffer_key, params).write!(trace) + described_class.new(buffer_key, params).write!(data) end it 'deletes' do @@ -250,16 +265,8 @@ end context 'when buffer_key does not exist' do - it 'deletes' do - Gitlab::Redis::Cache.with do |redis| - expect(redis.exists(buffer_key)).to be_falsy - end - - subject - - Gitlab::Redis::Cache.with do |redis| - expect(redis.exists(buffer_key)).to be_falsy - end + it 'raises an error' do + expect { subject }.to raise_error(described_class::BufferKeyNotFoundError) end end end diff --git a/spec/lib/gitlab/ci/trace/chunked_file/chunked_io_spec.rb b/spec/lib/gitlab/ci/trace/chunked_file/chunked_io_spec.rb index 048cad9f2e05..ffee27ca6e2b 100644 --- a/spec/lib/gitlab/ci/trace/chunked_file/chunked_io_spec.rb +++ b/spec/lib/gitlab/ci/trace/chunked_file/chunked_io_spec.rb @@ -3,10 +3,9 @@ describe Gitlab::Ci::Trace::ChunkedFile::ChunkedIO, :clean_gitlab_redis_cache do include ChunkedIOHelpers - let(:chunked_io) { described_class.new(job_id, size, mode) } + let(:chunked_io) { described_class.new(job_id, nil, mode) } let(:job) { create(:ci_build) } let(:job_id) { job.id } - let(:size) { sample_trace_size } let(:mode) { 'rb' } describe 'ChunkStore is Redis', :partial_support do diff --git a/spec/support/chunked_io/chunked_io_helpers.rb b/spec/support/chunked_io/chunked_io_helpers.rb index c12ad743c001..35b13bccdb4f 100644 --- a/spec/support/chunked_io/chunked_io_helpers.rb +++ b/spec/support/chunked_io/chunked_io_helpers.rb @@ -1,6 +1,6 @@ module ChunkedIOHelpers def fill_trace_to_chunks(data) - stream = described_class.new(job_id, data.length, 'wb') + stream = described_class.new(job_id, nil, 'a+b') stream.write(data) stream.close end @@ -13,27 +13,23 @@ def sample_trace_raw end end - def sample_trace_size - sample_trace_raw.length - end - - def sample_trace_raw_for_live_trace - File.read(expand_fixture_path('trace/sample_trace')) - end + # def sample_trace_raw_for_live_trace + # File.read(expand_fixture_path('trace/sample_trace')) 
+ # end - def sample_trace_size_for_live_trace - sample_trace_raw_for_live_trace.length - end + # def sample_trace_size_for_live_trace + # sample_trace_raw_for_live_trace.length + # end - def fill_trace_to_chunks_for_live_trace(data) - stream = described_class.new(job_id, 'wb') - stream.write(data) - stream.close - end + # def fill_trace_to_chunks_for_live_trace(data) + # stream = described_class.new(job_id, 'a+b') + # stream.write(data) + # stream.close + # end - def stub_chunk_store_get_failed - allow_any_instance_of(chunk_store).to receive(:get).and_return(nil) - end + # def stub_chunk_store_get_failed + # allow_any_instance_of(chunk_store).to receive(:get).and_return(nil) + # end def set_smaller_buffer_size_than(file_size) blocks = (file_size / 128) diff --git a/spec/support/shared_examples/lib/gitlab/ci/trace/chunked_file/chunked_io_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/ci/trace/chunked_file/chunked_io_shared_examples.rb index 8bb3a6d8ff9a..98199995182e 100644 --- a/spec/support/shared_examples/lib/gitlab/ci/trace/chunked_file/chunked_io_shared_examples.rb +++ b/spec/support/shared_examples/lib/gitlab/ci/trace/chunked_file/chunked_io_shared_examples.rb @@ -8,53 +8,112 @@ let(:mode) { 'rb' } it 'raises no exception' do - described_class.new(job_id, size, mode) - - expect { described_class.new(job_id, size, mode) }.not_to raise_error + expect { described_class.new(job_id, nil, mode) }.not_to raise_error + expect { described_class.new(job_id, nil, mode) }.not_to raise_error end end - context 'when mode is write' do + context 'when mode is append' do let(:mode) { 'a+b' } it 'raises an exception' do - described_class.new(job_id, size, mode) - - expect { described_class.new(job_id, size, mode) }.to raise_error('Already opened by another process') + expect { described_class.new(job_id, nil, mode) }.not_to raise_error + expect { described_class.new(job_id, nil, mode) }.to raise_error('Already opened by another process') end context 'when closed after open' do it 'does not raise an exception' do - described_class.new(job_id, size, mode).close - - expect { described_class.new(job_id, size, mode) }.not_to raise_error + expect { described_class.new(job_id, nil, mode).close }.not_to raise_error + expect { described_class.new(job_id, nil, mode) }.not_to raise_error end end end + + context 'when mode is write' do + let(:mode) { 'wb' } + + it 'raises no exception' do + expect { described_class.new(job_id, nil, mode) }.to raise_error("Mode 'w' is not supported") + end + end + end + + describe 'Permissions', :partial_support do + before do + fill_trace_to_chunks(sample_trace_raw) + end + + context "when mode is 'a+b'" do + let(:mode) { 'a+b' } + + it 'can write' do + expect { described_class.new(job_id, nil, mode).write('abc') } + .not_to raise_error + end + + it 'can read' do + expect { described_class.new(job_id, nil, mode).read(10) } + .not_to raise_error + end + end + + context "when mode is 'ab'" do + let(:mode) { 'ab' } + + it 'can write' do + expect { described_class.new(job_id, nil, mode).write('abc') } + .not_to raise_error + end + + it 'can not read' do + expect { described_class.new(job_id, nil, mode).read(10) } + .to raise_error('not opened for reading') + end + end + + context "when mode is 'rb'" do + let(:mode) { 'rb' } + + it 'can not write' do + expect { described_class.new(job_id, nil, mode).write('abc') } + .to raise_error('not opened for writing') + end + + it 'can read' do + expect { described_class.new(job_id, nil, mode).read(10) } + .not_to raise_error + end 
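# Mode matrix these Permissions examples pin down:
#
#   'a+b' -> append + read
#   'ab'  -> append only (reads raise IOError)
#   'rb'  -> read only (writes raise IOError)
#   'wb'  -> rejected at initialize ("Mode 'w' is not supported")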
+ end end describe '#seek' do subject { chunked_io.seek(pos, where) } + before do + set_smaller_buffer_size_than(sample_trace_raw.length) + fill_trace_to_chunks(sample_trace_raw) + end + context 'when moves pos to end of the file' do let(:pos) { 0 } let(:where) { IO::SEEK_END } - it { is_expected.to eq(size) } + it { is_expected.to eq(sample_trace_raw.length) } end context 'when moves pos to middle of the file' do - let(:pos) { size / 2 } + let(:pos) { sample_trace_raw.length / 2 } let(:where) { IO::SEEK_SET } - it { is_expected.to eq(size / 2) } + it { is_expected.to eq(pos) } end context 'when moves pos around' do it 'matches the result' do expect(chunked_io.seek(0)).to eq(0) expect(chunked_io.seek(100, IO::SEEK_CUR)).to eq(100) - expect { chunked_io.seek(size + 1, IO::SEEK_CUR) }.to raise_error('new position is outside of file') + expect { chunked_io.seek(sample_trace_raw.length + 1, IO::SEEK_CUR) } + .to raise_error('new position is outside of file') end end end @@ -62,9 +121,14 @@ describe '#eof?' do subject { chunked_io.eof? } + before do + set_smaller_buffer_size_than(sample_trace_raw.length) + fill_trace_to_chunks(sample_trace_raw) + end + context 'when current pos is at end of the file' do before do - chunked_io.seek(size, IO::SEEK_SET) + chunked_io.seek(sample_trace_raw.length, IO::SEEK_SET) end it { is_expected.to be_truthy } @@ -84,39 +148,39 @@ context 'when buffer size is smaller than file size' do before do - set_smaller_buffer_size_than(size) + set_smaller_buffer_size_than(sample_trace_raw.length) fill_trace_to_chunks(sample_trace_raw) end it 'yields lines' do - expect { |b| described_class.new(job_id, size, 'rb').each_line(&b) } + expect { |b| described_class.new(job_id, nil, 'rb').each_line(&b) } .to yield_successive_args(*string_io.each_line.to_a) end end context 'when buffer size is larger than file size', :partial_support do before do - set_larger_buffer_size_than(size) + set_larger_buffer_size_than(sample_trace_raw.length) fill_trace_to_chunks(sample_trace_raw) end it 'calls get_chunk only once' do expect(chunk_store).to receive(:open).once.and_call_original - described_class.new(job_id, size, 'rb').each_line { |line| } + described_class.new(job_id, nil, 'rb').each_line { |line| } end end end describe '#read' do - subject { described_class.new(job_id, size, 'rb').read(length) } + subject { described_class.new(job_id, nil, 'rb').read(length) } - context 'when read whole size' do + context 'when read the whole size' do let(:length) { nil } context 'when buffer size is smaller than file size' do before do - set_smaller_buffer_size_than(size) + set_smaller_buffer_size_than(sample_trace_raw.length) fill_trace_to_chunks(sample_trace_raw) end @@ -127,7 +191,7 @@ context 'when buffer size is larger than file size', :partial_support do before do - set_larger_buffer_size_than(size) + set_larger_buffer_size_than(sample_trace_raw.length) fill_trace_to_chunks(sample_trace_raw) end @@ -142,7 +206,7 @@ context 'when buffer size is smaller than file size' do before do - set_smaller_buffer_size_than(size) + set_smaller_buffer_size_than(sample_trace_raw.length) fill_trace_to_chunks(sample_trace_raw) end @@ -153,7 +217,7 @@ context 'when buffer size is larger than file size', :partial_support do before do - set_larger_buffer_size_than(size) + set_larger_buffer_size_than(sample_trace_raw.length) fill_trace_to_chunks(sample_trace_raw) end @@ -164,11 +228,11 @@ end context 'when tries to read oversize' do - let(:length) { size + 1000 } + let(:length) { sample_trace_raw.length + 1000 } 
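# The oversize read below pins down IO#read semantics: requesting more
# than is available returns everything up to EOF rather than raising,
# mirroring e.g.
#
#   io = StringIO.new('abc')
#   io.read(1_000)  # => "abc"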
context 'when buffer size is smaller than file size' do before do - set_smaller_buffer_size_than(size) + set_smaller_buffer_size_than(sample_trace_raw.length) fill_trace_to_chunks(sample_trace_raw) end @@ -179,7 +243,7 @@ context 'when buffer size is larger than file size', :partial_support do before do - set_larger_buffer_size_than(size) + set_larger_buffer_size_than(sample_trace_raw.length) fill_trace_to_chunks(sample_trace_raw) end @@ -194,7 +258,7 @@ context 'when buffer size is smaller than file size' do before do - set_smaller_buffer_size_than(size) + set_smaller_buffer_size_than(sample_trace_raw.length) fill_trace_to_chunks(sample_trace_raw) end @@ -205,7 +269,7 @@ context 'when buffer size is larger than file size', :partial_support do before do - set_larger_buffer_size_than(size) + set_larger_buffer_size_than(sample_trace_raw.length) fill_trace_to_chunks(sample_trace_raw) end @@ -214,21 +278,6 @@ end end end - - context 'when chunk store failed to get chunk' do - let(:length) { nil } - - before do - set_smaller_buffer_size_than(size) - fill_trace_to_chunks(sample_trace_raw) - - stub_chunk_store_get_failed - end - - it 'reads a trace' do - expect { subject }.to raise_error(described_class::FailedToGetChunkError) - end - end end describe '#readline' do @@ -244,23 +293,9 @@ end end - context 'when chunk store failed to get chunk' do - let(:length) { nil } - - before do - set_smaller_buffer_size_than(size) - fill_trace_to_chunks(sample_trace_raw) - stub_chunk_store_get_failed - end - - it 'reads a trace' do - expect { subject }.to raise_error(described_class::FailedToGetChunkError) - end - end - context 'when buffer size is smaller than file size' do before do - set_smaller_buffer_size_than(size) + set_smaller_buffer_size_than(sample_trace_raw.length) fill_trace_to_chunks(sample_trace_raw) end @@ -269,7 +304,7 @@ context 'when buffer size is larger than file size', :partial_support do before do - set_larger_buffer_size_than(size) + set_larger_buffer_size_than(sample_trace_raw.length) fill_trace_to_chunks(sample_trace_raw) end @@ -278,11 +313,11 @@ context 'when pos is at middle of the file' do before do - set_smaller_buffer_size_than(size) + set_smaller_buffer_size_than(sample_trace_raw.length) fill_trace_to_chunks(sample_trace_raw) - chunked_io.seek(size / 2) - string_io.seek(size / 2) + chunked_io.seek(chunked_io.size / 2) + string_io.seek(string_io.size / 2) end it 'reads from pos' do @@ -296,171 +331,91 @@ let(:data) { sample_trace_raw } - context 'when write mode' do - let(:mode) { 'wb' } - - context 'when buffer size is smaller than file size' do - before do - set_smaller_buffer_size_than(size) - end - - it 'writes a trace' do - is_expected.to eq(data.length) + context 'when append mode', :partial_support do + let(:mode) { 'a+b' } - described_class.open(job_id, size, 'rb') do |stream| - expect(stream.read).to eq(data) - expect(chunk_store.chunks_count(job_id)).to eq(stream.send(:chunks_count)) - expect(chunk_store.chunks_size(job_id)).to eq(data.length) + context 'when data does not exist' do + context 'when buffer size is smaller than file size' do + before do + set_smaller_buffer_size_than(sample_trace_raw.length) end - end - end - - context 'when buffer size is larger than file size', :partial_support do - before do - set_larger_buffer_size_than(size) - end - it 'writes a trace' do - is_expected.to eq(data.length) + it 'writes a trace' do + is_expected.to eq(data.length) - described_class.open(job_id, size, 'rb') do |stream| - expect(stream.read).to eq(data) - 
expect(chunk_store.chunks_count(job_id)).to eq(stream.send(:chunks_count)) - expect(chunk_store.chunks_size(job_id)).to eq(data.length) + described_class.new(job_id, nil, 'rb') do |stream| + expect(stream.read).to eq(data) + expect(chunk_store.chunks_count(job_id)).to eq(stream.send(:chunks_count)) + expect(chunk_store.chunks_size(job_id)).to eq(data.length) + end end end - end - context 'when data is nil' do - let(:data) { nil } - - it 'writes a trace' do - expect { subject } .to raise_error('Could not write empty data') - end - end - end - - context 'when append mode', :partial_support do - let(:original_data) { 'original data' } - let(:total_size) { original_data.length + data.length } - - context 'when buffer size is smaller than file size' do - before do - set_smaller_buffer_size_than(size) - fill_trace_to_chunks(original_data) - end - - it 'appends a trace' do - described_class.open(job_id, original_data.length, 'a+b') do |stream| - expect(stream.write(data)).to eq(data.length) + context 'when buffer size is larger than file size', :partial_support do + before do + set_larger_buffer_size_than(data.length) end - described_class.open(job_id, total_size, 'rb') do |stream| - expect(stream.read).to eq(original_data + data) - expect(chunk_store.chunks_count(job_id)).to eq(stream.send(:chunks_count)) - expect(chunk_store.chunks_size(job_id)).to eq(total_size) - end - end - end + it 'writes a trace' do + is_expected.to eq(data.length) - context 'when buffer size is larger than file size' do - before do - set_larger_buffer_size_than(size) - fill_trace_to_chunks(original_data) + described_class.new(job_id, nil, 'rb') do |stream| + expect(stream.read).to eq(data) + expect(chunk_store.chunks_count(job_id)).to eq(stream.send(:chunks_count)) + expect(chunk_store.chunks_size(job_id)).to eq(data.length) + end + end end - it 'appends a trace' do - described_class.open(job_id, original_data.length, 'a+b') do |stream| - expect(stream.write(data)).to eq(data.length) - end + context 'when data is nil' do + let(:data) { nil } - described_class.open(job_id, total_size, 'rb') do |stream| - expect(stream.read).to eq(original_data + data) - expect(chunk_store.chunks_count(job_id)).to eq(stream.send(:chunks_count)) - expect(chunk_store.chunks_size(job_id)).to eq(total_size) + it 'writes a trace' do + expect { subject } .to raise_error('Could not write empty data') end end end - end - end - - describe '#truncate' do - context 'when data exists' do - context 'when buffer size is smaller than file size' do - before do - set_smaller_buffer_size_than(size) - fill_trace_to_chunks(sample_trace_raw) - end - it 'truncates a trace' do - described_class.open(job_id, size, 'rb') do |stream| - expect(stream.read).to eq(sample_trace_raw) - end + context 'when data already exists' do + let(:exist_data) { 'exist data' } + let(:total_size) { exist_data.length + data.length } - described_class.open(job_id, size, 'wb') do |stream| - stream.truncate(0) + context 'when buffer size is smaller than file size' do + before do + set_smaller_buffer_size_than(data.length) + fill_trace_to_chunks(exist_data) end - described_class.open(job_id, 0, 'rb') do |stream| - expect(stream.read).to be_empty - end - - expect(chunk_store.chunks_count(job_id)).to eq(0) - expect(chunk_store.chunks_size(job_id)).to eq(0) - end - - context 'when offset is negative' do - it 'raises an error' do - described_class.open(job_id, size, 'wb') do |stream| - expect { stream.truncate(-1) }.to raise_error('Offset is out of bound') + it 'appends a trace' do + 
described_class.new(job_id, nil, 'a+b') do |stream| + expect(stream.write(data)).to eq(data.length) end - end - end - context 'when offset is larger than file size' do - it 'raises an error' do - described_class.open(job_id, size, 'wb') do |stream| - expect { stream.truncate(size + 1) }.to raise_error('Offset is out of bound') + described_class.new(job_id, nil, 'rb') do |stream| + expect(stream.read).to eq(exist_data + data) + expect(chunk_store.chunks_count(job_id)).to eq(stream.send(:chunks_count)) + expect(chunk_store.chunks_size(job_id)).to eq(total_size) end end end - end - - context 'when buffer size is larger than file size', :partial_support do - before do - set_larger_buffer_size_than(size) - fill_trace_to_chunks(sample_trace_raw) - end - it 'truncates a trace' do - described_class.open(job_id, size, 'rb') do |stream| - expect(stream.read).to eq(sample_trace_raw) + context 'when buffer size is larger than file size' do + before do + set_larger_buffer_size_than(data.length) + fill_trace_to_chunks(exist_data) end - described_class.open(job_id, size, 'wb') do |stream| - stream.truncate(0) - end + it 'appends a trace' do + described_class.new(job_id, nil, 'a+b') do |stream| + expect(stream.write(data)).to eq(data.length) + end - described_class.open(job_id, 0, 'rb') do |stream| - expect(stream.read).to be_empty + described_class.new(job_id, nil, 'rb') do |stream| + expect(stream.read).to eq(exist_data + data) + expect(chunk_store.chunks_count(job_id)).to eq(stream.send(:chunks_count)) + expect(chunk_store.chunks_size(job_id)).to eq(total_size) + end end - - expect(chunk_store.chunks_count(job_id)).to eq(0) - expect(chunk_store.chunks_size(job_id)).to eq(0) - end - end - end - - context 'when data does not exist' do - before do - set_smaller_buffer_size_than(size) - end - - it 'truncates a trace' do - described_class.open(job_id, size, 'wb') do |stream| - stream.truncate(0) - expect(stream.send(:tell)).to eq(0) - expect(stream.send(:size)).to eq(0) end end end -- GitLab From 1de5b8db5a684d367a1555c754220ba6b9f1e268 Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Mon, 2 Apr 2018 23:11:48 +0900 Subject: [PATCH 17/86] Fix Live trace --- .../ci/trace/chunked_file/chunked_io.rb | 19 +- .../ci/trace/chunked_file/live_trace.rb | 15 +- .../ci/trace/chunked_file/chunked_io_spec.rb | 8 +- .../ci/trace/chunked_file/live_trace_spec.rb | 216 +++++------------- spec/support/chunked_io/chunked_io_helpers.rb | 29 +-- spec/support/chunked_io/live_trace_helpers.rb | 32 --- .../chunked_io_shared_examples.rb | 119 ++++++---- 7 files changed, 146 insertions(+), 292 deletions(-) delete mode 100644 spec/support/chunked_io/live_trace_helpers.rb diff --git a/lib/gitlab/ci/trace/chunked_file/chunked_io.rb b/lib/gitlab/ci/trace/chunked_file/chunked_io.rb index f28955264c86..c9e5de8d32c8 100644 --- a/lib/gitlab/ci/trace/chunked_file/chunked_io.rb +++ b/lib/gitlab/ci/trace/chunked_file/chunked_io.rb @@ -202,17 +202,6 @@ def write_chunk(data) written_size end - def truncate_chunk(offset) - chunk_store.open(job_id, chunk_index, params_for_store) do |store| - with_callbacks(:truncate_chunk, store) do - removed_size = store.size - offset - store.truncate!(offset) - - removed_size - end - end - end - def params_for_store(c_index = chunk_index) { buffer_size: buffer_size, @@ -241,12 +230,8 @@ def chunks_count (size / buffer_size.to_f).ceil end - def first_chunk? - chunk_index == 0 - end - - def last_chunk? 
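# NOTE: `last_range` (added below) is the byte range of the final,
# still-growing chunk; with a hypothetical buffer_size of 128 and a size
# of 300 it evaluates to (256..300). LiveTrace#chunk_store checks
# `last_range.include?(tell)` to keep that tail chunk in Redis while all
# filled chunks go to the database store.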
- (chunk_start...chunk_end).include?(tell) + def last_range + ((size / buffer_size) * buffer_size..size) end def chunk_store diff --git a/lib/gitlab/ci/trace/chunked_file/live_trace.rb b/lib/gitlab/ci/trace/chunked_file/live_trace.rb index fe2205144cb0..1e1700b43c95 100644 --- a/lib/gitlab/ci/trace/chunked_file/live_trace.rb +++ b/lib/gitlab/ci/trace/chunked_file/live_trace.rb @@ -5,8 +5,7 @@ module ChunkedFile class LiveTrace < ChunkedIO class << self def exist?(job_id) - ChunkStores::Redis.chunks_count(job_id) > 0 || - ChunkStores::Database.chunks_count(job_id) > 0 + ChunkStore::Redis.chunks_count(job_id) > 0 || ChunkStore::Database.chunks_count(job_id) > 0 end end @@ -22,7 +21,7 @@ def stash_to_database(store) end end - # Efficient process than iterating each + # This is more efficient than iterating each chunk store and deleting def truncate(offset) if offset == 0 delete @@ -33,13 +32,9 @@ def truncate(offset) end end - def present? - self.exist?(job_id) - end - def delete - ChunkStores::Redis.delete_all(job_id) - ChunkStores::Database.delete_all(job_id) + ChunkStore::Redis.delete_all(job_id) + ChunkStore::Database.delete_all(job_id) end private @@ -50,7 +45,7 @@ def calculate_size(job_id) end def chunk_store - if last_chunk? || eof? + if last_range.include?(tell) ChunkStore::Redis else ChunkStore::Database diff --git a/spec/lib/gitlab/ci/trace/chunked_file/chunked_io_spec.rb b/spec/lib/gitlab/ci/trace/chunked_file/chunked_io_spec.rb index ffee27ca6e2b..db0ec074da25 100644 --- a/spec/lib/gitlab/ci/trace/chunked_file/chunked_io_spec.rb +++ b/spec/lib/gitlab/ci/trace/chunked_file/chunked_io_spec.rb @@ -9,10 +9,10 @@ let(:mode) { 'rb' } describe 'ChunkStore is Redis', :partial_support do - let(:chunk_store) { Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis } + let(:chunk_stores) { [Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis] } before do - allow_any_instance_of(described_class).to receive(:chunk_store).and_return(chunk_store) + allow_any_instance_of(described_class).to receive(:chunk_store).and_return(chunk_stores.first) allow_any_instance_of(described_class).to receive(:buffer_size).and_return(128.kilobytes) end @@ -20,10 +20,10 @@ end describe 'ChunkStore is Database' do - let(:chunk_store) { Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Database } + let(:chunk_stores) { [Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Database] } before do - allow_any_instance_of(described_class).to receive(:chunk_store).and_return(chunk_store) + allow_any_instance_of(described_class).to receive(:chunk_store).and_return(chunk_stores.first) allow_any_instance_of(described_class).to receive(:buffer_size).and_return(128.kilobytes) end diff --git a/spec/lib/gitlab/ci/trace/chunked_file/live_trace_spec.rb b/spec/lib/gitlab/ci/trace/chunked_file/live_trace_spec.rb index 14f6c07a0396..ad91834f3201 100644 --- a/spec/lib/gitlab/ci/trace/chunked_file/live_trace_spec.rb +++ b/spec/lib/gitlab/ci/trace/chunked_file/live_trace_spec.rb @@ -1,201 +1,89 @@ require 'spec_helper' describe Gitlab::Ci::Trace::ChunkedFile::LiveTrace, :clean_gitlab_redis_cache do - include LiveTraceHelpers + include ChunkedIOHelpers - let(:chunked_io) { described_class.new(job_id, mode) } + let(:chunked_io) { described_class.new(job_id, nil, mode) } let(:job) { create(:ci_build) } let(:job_id) { job.id } - let(:size) { sample_trace_size } let(:mode) { 'rb' } - describe '#write' do - subject { chunked_io.write(data) } - - let(:data) { sample_trace_raw } - - context 'when write mode' do - let(:mode) { 'wb' } + let(:chunk_stores) do + 
[Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis, + Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Database] + end - context 'when buffer size is smaller than file size' do - before do - set_smaller_buffer_size_than(size) - end + describe 'ChunkStores are Redis and Database', :partial_support do + it_behaves_like 'ChunkedIO shared tests' + end - it 'writes a trace' do - is_expected.to eq(data.length) + describe '.exist?' do + subject { described_class.exist?(job_id) } - described_class.open(job_id, 'rb') do |stream| - expect(stream.read).to eq(data) - expect(total_chunks_count).to eq(stream.send(:chunks_count)) - expect(total_chunks_size).to eq(data.length) - end - end + context 'when a chunk exists in a store' do + before do + fill_trace_to_chunks(sample_trace_raw) end - context 'when buffer size is larger than file size' do - before do - set_larger_buffer_size_than(size) - end + it { is_expected.to be_truthy } + end - it 'writes a trace' do - is_expected.to eq(data.length) + context 'when chunks do not exists in any store' do + it { is_expected.to be_falsey } + end + end - described_class.open(job_id, 'rb') do |stream| - expect(stream.read).to eq(data) - expect(total_chunks_count).to eq(stream.send(:chunks_count)) - expect(total_chunks_size).to eq(data.length) - end - end - end + describe '#truncate' do + subject { chunked_io.truncate(offset) } - context 'when data is nil' do - let(:data) { nil } + let(:mode) { 'a+b' } - it 'writes a trace' do - expect { subject } .to raise_error('Could not write empty data') - end - end + before do + fill_trace_to_chunks(sample_trace_raw) end - context 'when append mode' do - let(:original_data) { 'original data' } - let(:total_size) { original_data.length + data.length } - - context 'when buffer size is smaller than file size' do - before do - set_smaller_buffer_size_than(size) - fill_trace_to_chunks(original_data) - end - - it 'appends a trace' do - described_class.open(job_id, 'a+b') do |stream| - expect(stream.write(data)).to eq(data.length) - end - - described_class.open(job_id, 'rb') do |stream| - expect(stream.read).to eq(original_data + data) - expect(total_chunks_count).to eq(stream.send(:chunks_count)) - expect(total_chunks_size).to eq(total_size) - end - end - end + context 'when offset is 0' do + let(:offset) { 0 } - context 'when buffer size is larger than file size' do - before do - set_larger_buffer_size_than(size) - fill_trace_to_chunks(original_data) - end - - it 'appends a trace' do - described_class.open(job_id, 'a+b') do |stream| - expect(stream.write(data)).to eq(data.length) - end - - described_class.open(job_id, 'rb') do |stream| - expect(stream.read).to eq(original_data + data) - expect(total_chunks_count).to eq(stream.send(:chunks_count)) - expect(total_chunks_size).to eq(total_size) - end - end + it 'deletes all chunks' do + expect { subject }.to change { described_class.exist?(job_id) }.from(true).to(false) end end - end - describe '#truncate' do - context 'when data exists' do - context 'when buffer size is smaller than file size' do - before do - puts "#{self.class.name} - #{__callee__}: ===== 1" - set_smaller_buffer_size_than(size) - fill_trace_to_chunks(sample_trace_raw) - end - - it 'truncates a trace' do - puts "#{self.class.name} - #{__callee__}: ===== 2" - described_class.open(job_id, 'rb') do |stream| - expect(stream.read).to eq(sample_trace_raw) - end - - puts "#{self.class.name} - #{__callee__}: ===== 3" - described_class.open(job_id, 'wb') do |stream| - stream.truncate(0) - end - - puts "#{self.class.name} - 
#{__callee__}: ===== 4" - expect(total_chunks_count).to eq(0) - expect(total_chunks_size).to eq(0) - - puts "#{self.class.name} - #{__callee__}: ===== 5" - described_class.open(job_id, 'rb') do |stream| - expect(stream.read).to be_empty - end - end - - context 'when offset is negative' do - it 'raises an error' do - described_class.open(job_id, 'wb') do |stream| - expect { stream.truncate(-1) }.to raise_error('Offset is out of bound') - end - end - end - - context 'when offset is larger than file size' do - it 'raises an error' do - described_class.open(job_id, 'wb') do |stream| - expect { stream.truncate(size + 1) }.to raise_error('Offset is out of bound') - end - end - end - end - - context 'when buffer size is larger than file size' do - before do - set_larger_buffer_size_than(size) - fill_trace_to_chunks(sample_trace_raw) - end - - it 'truncates a trace' do - described_class.open(job_id, 'rb') do |stream| - expect(stream.read).to eq(sample_trace_raw) - end + context 'when offset is size' do + let(:offset) { sample_trace_raw.length } - described_class.open(job_id, 'wb') do |stream| - stream.truncate(0) - end + it 'does nothing' do + expect { subject }.not_to change { described_class.exist?(job_id) } + end + end - described_class.open(job_id, 'rb') do |stream| - expect(stream.read).to be_empty - end + context 'when offset is else' do + let(:offset) { 10 } - expect(total_chunks_count).to eq(0) - expect(total_chunks_size).to eq(0) - end + it 'raises an error' do + expect { subject }.to raise_error('Unexpected operation') end end + end - context 'when data does not exist' do + describe '#delete' do + subject { chunked_io.delete } + + context 'when a chunk exists in a store' do before do - set_smaller_buffer_size_than(size) + fill_trace_to_chunks(sample_trace_raw) end - it 'truncates a trace' do - described_class.open(job_id, 'wb') do |stream| - stream.truncate(0) - expect(stream.send(:tell)).to eq(0) - expect(stream.send(:size)).to eq(0) - end + it 'deletes' do + expect { subject }.to change { described_class.exist?(job_id) }.from(true).to(false) end end - end - - def total_chunks_count - Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis.chunks_count(job_id) + - Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Database.chunks_count(job_id) - end - def total_chunks_size - Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis.chunks_size(job_id) + - Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Database.chunks_size(job_id) + context 'when chunks do not exists in any store' do + it 'deletes' do + expect { subject }.not_to change { described_class.exist?(job_id) } + end + end end end diff --git a/spec/support/chunked_io/chunked_io_helpers.rb b/spec/support/chunked_io/chunked_io_helpers.rb index 35b13bccdb4f..e9dd2e775386 100644 --- a/spec/support/chunked_io/chunked_io_helpers.rb +++ b/spec/support/chunked_io/chunked_io_helpers.rb @@ -6,31 +6,14 @@ def fill_trace_to_chunks(data) end def sample_trace_raw - if chunk_store == Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis - File.read(expand_fixture_path('trace/sample_trace')) + # ChunkStore::Database doesn't support appending, so the test data size has to be least common multiple + if chunk_stores.first == Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Database + '01234567' * 32 # 256 bytes else - '01234567' * 32 + File.read(expand_fixture_path('trace/sample_trace')) end end - # def sample_trace_raw_for_live_trace - # File.read(expand_fixture_path('trace/sample_trace')) - # end - - # def sample_trace_size_for_live_trace - # 
sample_trace_raw_for_live_trace.length - # end - - # def fill_trace_to_chunks_for_live_trace(data) - # stream = described_class.new(job_id, 'a+b') - # stream.write(data) - # stream.close - # end - - # def stub_chunk_store_get_failed - # allow_any_instance_of(chunk_store).to receive(:get).and_return(nil) - # end - def set_smaller_buffer_size_than(file_size) blocks = (file_size / 128) new_size = (blocks / 2) * 128 @@ -42,4 +25,8 @@ def set_larger_buffer_size_than(file_size) new_size = (blocks * 2) * 128 allow_any_instance_of(described_class).to receive(:buffer_size).and_return(new_size) end + + def set_half_buffer_size_of(file_size) + allow_any_instance_of(described_class).to receive(:buffer_size).and_return(file_size / 2) + end end diff --git a/spec/support/chunked_io/live_trace_helpers.rb b/spec/support/chunked_io/live_trace_helpers.rb deleted file mode 100644 index 8ff85daff28d..000000000000 --- a/spec/support/chunked_io/live_trace_helpers.rb +++ /dev/null @@ -1,32 +0,0 @@ -module LiveTraceHelpers - def fill_trace_to_chunks(data) - stream = described_class.new(job_id, 'wb') - stream.write(data) - stream.close - end - - def sample_trace_raw - File.read(expand_fixture_path('trace/sample_trace')) - end - - def sample_trace_size - sample_trace_raw.length - end - - def stub_chunk_store_get_failed - allow_any_instance_of(Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis).to receive(:get).and_return(nil) - allow_any_instance_of(Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Database).to receive(:get).and_return(nil) - end - - def set_smaller_buffer_size_than(file_size) - blocks = (file_size / 128) - new_size = (blocks / 2) * 128 - allow_any_instance_of(described_class).to receive(:buffer_size).and_return(new_size) - end - - def set_larger_buffer_size_than(file_size) - blocks = (file_size / 128) - new_size = (blocks * 2) * 128 - allow_any_instance_of(described_class).to receive(:buffer_size).and_return(new_size) - end -end diff --git a/spec/support/shared_examples/lib/gitlab/ci/trace/chunked_file/chunked_io_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/ci/trace/chunked_file/chunked_io_shared_examples.rb index 98199995182e..7a7a0760f2fe 100644 --- a/spec/support/shared_examples/lib/gitlab/ci/trace/chunked_file/chunked_io_shared_examples.rb +++ b/spec/support/shared_examples/lib/gitlab/ci/trace/chunked_file/chunked_io_shared_examples.rb @@ -1,6 +1,6 @@ shared_examples "ChunkedIO shared tests" do around(:each, :partial_support) do |example| - example.run if chunk_store == Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis + example.run if chunk_stores.first == Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis end describe '#new' do @@ -165,7 +165,7 @@ end it 'calls get_chunk only once' do - expect(chunk_store).to receive(:open).once.and_call_original + expect(chunk_stores.first).to receive(:open).once.and_call_original described_class.new(job_id, nil, 'rb').each_line { |line| } end @@ -178,15 +178,19 @@ context 'when read the whole size' do let(:length) { nil } + shared_examples 'reads a trace' do + it do + is_expected.to eq(sample_trace_raw) + end + end + context 'when buffer size is smaller than file size' do before do set_smaller_buffer_size_than(sample_trace_raw.length) fill_trace_to_chunks(sample_trace_raw) end - it 'reads a trace' do - is_expected.to eq(sample_trace_raw) - end + it_behaves_like 'reads a trace' end context 'when buffer size is larger than file size', :partial_support do @@ -195,9 +199,16 @@ fill_trace_to_chunks(sample_trace_raw) end - it 'reads a trace' do - 
is_expected.to eq(sample_trace_raw) + it_behaves_like 'reads a trace' + end + + context 'when buffer size is half of file size' do + before do + set_half_buffer_size_of(sample_trace_raw.length) + fill_trace_to_chunks(sample_trace_raw) end + + it_behaves_like 'reads a trace' end end @@ -286,7 +297,7 @@ let(:string_io) { StringIO.new(sample_trace_raw) } shared_examples 'all line matching' do - it 'reads a line' do + it do (0...sample_trace_raw.lines.count).each do expect(chunked_io.readline).to eq(string_io.readline) end @@ -311,6 +322,15 @@ it_behaves_like 'all line matching' end + context 'when buffer size is half of file size' do + before do + set_half_buffer_size_of(sample_trace_raw.length) + fill_trace_to_chunks(sample_trace_raw) + end + + it_behaves_like 'all line matching' + end + context 'when pos is at middle of the file' do before do set_smaller_buffer_size_than(sample_trace_raw.length) @@ -331,40 +351,46 @@ let(:data) { sample_trace_raw } - context 'when append mode', :partial_support do + context 'when append mode' do let(:mode) { 'a+b' } context 'when data does not exist' do - context 'when buffer size is smaller than file size' do - before do - set_smaller_buffer_size_than(sample_trace_raw.length) - end - - it 'writes a trace' do + shared_examples 'writes a trace' do + it do is_expected.to eq(data.length) described_class.new(job_id, nil, 'rb') do |stream| expect(stream.read).to eq(data) - expect(chunk_store.chunks_count(job_id)).to eq(stream.send(:chunks_count)) - expect(chunk_store.chunks_size(job_id)).to eq(data.length) + expect(chunk_stores.inject(0) { |sum, store| sum + store.chunks_count(job_id) }) + .to eq(stream.send(:chunks_count)) + expect(chunk_stores.inject(0) { |sum, store| sum + store.chunks_size(job_id) }) + .to eq(data.length) end end end + context 'when buffer size is smaller than file size' do + before do + set_smaller_buffer_size_than(data.length) + end + + it_behaves_like 'writes a trace' + end + context 'when buffer size is larger than file size', :partial_support do before do set_larger_buffer_size_than(data.length) end - it 'writes a trace' do - is_expected.to eq(data.length) + it_behaves_like 'writes a trace' + end - described_class.new(job_id, nil, 'rb') do |stream| - expect(stream.read).to eq(data) - expect(chunk_store.chunks_count(job_id)).to eq(stream.send(:chunks_count)) - expect(chunk_store.chunks_size(job_id)).to eq(data.length) - end + context 'when buffer size is half of file size' do + before do + set_half_buffer_size_of(data.length) end + + it_behaves_like 'writes a trace' end context 'when data is nil' do @@ -376,46 +402,51 @@ end end - context 'when data already exists' do + context 'when data already exists', :partial_support do let(:exist_data) { 'exist data' } let(:total_size) { exist_data.length + data.length } - context 'when buffer size is smaller than file size' do - before do - set_smaller_buffer_size_than(data.length) - fill_trace_to_chunks(exist_data) - end - - it 'appends a trace' do + shared_examples 'appends a trace' do + it do described_class.new(job_id, nil, 'a+b') do |stream| expect(stream.write(data)).to eq(data.length) end described_class.new(job_id, nil, 'rb') do |stream| expect(stream.read).to eq(exist_data + data) - expect(chunk_store.chunks_count(job_id)).to eq(stream.send(:chunks_count)) - expect(chunk_store.chunks_size(job_id)).to eq(total_size) + expect(chunk_stores.inject(0) { |sum, store| sum + store.chunks_count(job_id) }) + .to eq(stream.send(:chunks_count)) + expect(chunk_stores.inject(0) { |sum, store| sum + 
store.chunks_size(job_id) })
+              .to eq(total_size)
           end
         end
       end

-      context 'when buffer size is larger than file size' do
+      context 'when buffer size is smaller than file size' do
+        before do
+          set_smaller_buffer_size_than(data.length)
+          fill_trace_to_chunks(exist_data)
+        end
+
+        it_behaves_like 'appends a trace'
+      end
+
+      context 'when buffer size is larger than file size', :partial_support do
         before do
           set_larger_buffer_size_than(data.length)
           fill_trace_to_chunks(exist_data)
         end

-        it 'appends a trace' do
-          described_class.new(job_id, nil, 'a+b') do |stream|
-            expect(stream.write(data)).to eq(data.length)
-          end
+        it_behaves_like 'appends a trace'
+      end

-          described_class.new(job_id, nil, 'rb') do |stream|
-            expect(stream.read).to eq(exist_data + data)
-            expect(chunk_store.chunks_count(job_id)).to eq(stream.send(:chunks_count))
-            expect(chunk_store.chunks_size(job_id)).to eq(total_size)
-          end
+      context 'when buffer size is half of file size' do
+        before do
+          set_half_buffer_size_of(data.length)
+          fill_trace_to_chunks(exist_data)
         end
+
+        it_behaves_like 'appends a trace'
       end
     end
   end
-- 
GitLab

From a689a220d352d20ff0b9c59a5783a3ef5c37c8d7 Mon Sep 17 00:00:00 2001
From: Shinya Maeda
Date: Mon, 2 Apr 2018 23:42:48 +0900
Subject: [PATCH 18/86] Fix static analysis

---
 .../ci/trace/chunked_file/chunk_store/redis.rb     |  8 ++++----
 lib/gitlab/ci/trace/chunked_file/chunked_io.rb     |  6 +++---
 .../ci/trace/chunked_file/concerns/callbacks.rb    | 15 +++++++--------
 .../ci/trace/chunked_file/concerns/errors.rb       |  1 -
 .../ci/trace/chunked_file/concerns/permissions.rb  |  8 +++-----
 spec/factories/ci/job_trace_chunks.rb              |  4 +---
 .../ci/trace/chunked_file/live_trace_spec.rb       |  2 +-
 7 files changed, 19 insertions(+), 25 deletions(-)

diff --git a/lib/gitlab/ci/trace/chunked_file/chunk_store/redis.rb b/lib/gitlab/ci/trace/chunked_file/chunk_store/redis.rb
index c87275319d95..dc3756a6339b 100644
--- a/lib/gitlab/ci/trace/chunked_file/chunk_store/redis.rb
+++ b/lib/gitlab/ci/trace/chunked_file/chunk_store/redis.rb
@@ -24,7 +24,7 @@ def exist?(job_id, chunk_index)

       def chunks_count(job_id)
         Gitlab::Redis::Cache.with do |redis|
-          redis.scan_each(:match => buffer_key(job_id, '?')).inject(0) do |sum, key|
+          redis.scan_each(match: buffer_key(job_id, '?')).inject(0) do |sum, key|
             sum + 1
           end
         end
@@ -32,15 +32,15 @@ def chunks_count(job_id)

       def chunks_size(job_id)
         Gitlab::Redis::Cache.with do |redis|
-          redis.scan_each(:match => buffer_key(job_id, '?')).inject(0) do |sum, key|
-            sum += redis.strlen(key)
+          redis.scan_each(match: buffer_key(job_id, '?')).inject(0) do |sum, key|
+            sum + redis.strlen(key)
           end
         end
       end

       def delete_all(job_id)
         Gitlab::Redis::Cache.with do |redis|
-          redis.scan_each(:match => buffer_key(job_id, '?')) do |key|
+          redis.scan_each(match: buffer_key(job_id, '?')) do |key|
             redis.del(key)
           end
         end
diff --git a/lib/gitlab/ci/trace/chunked_file/chunked_io.rb b/lib/gitlab/ci/trace/chunked_file/chunked_io.rb
index c9e5de8d32c8..f3d3aae5a5b5 100644
--- a/lib/gitlab/ci/trace/chunked_file/chunked_io.rb
+++ b/lib/gitlab/ci/trace/chunked_file/chunked_io.rb
@@ -189,10 +189,10 @@ def write_chunk(data)

         chunk_store.open(job_id, chunk_index, params_for_store) do |store|
           with_callbacks(:write_chunk, store) do
-            written_size = if buffer_size == data.length || store.size == 0
-              store.write!(data)
-            else
+            written_size = if store.size > 0 # rubocop:disable ZeroLengthPredicate
               store.append!(data)
+            else
+              store.write!(data)
             end

             raise WriteError, 'Written size mismatch' unless data.length == written_size

diff --git 
a/lib/gitlab/ci/trace/chunked_file/concerns/callbacks.rb b/lib/gitlab/ci/trace/chunked_file/concerns/callbacks.rb index 0a49ac4dbbf0..3990a492612c 100644 --- a/lib/gitlab/ci/trace/chunked_file/concerns/callbacks.rb +++ b/lib/gitlab/ci/trace/chunked_file/concerns/callbacks.rb @@ -7,27 +7,26 @@ module Callbacks extend ActiveSupport::Concern included do - class_attribute :_before_callbacks, :_after_callbacks, - :instance_writer => false + class_attribute :_before_callbacks, :_after_callbacks, instance_writer: false self._before_callbacks = Hash.new [] self._after_callbacks = Hash.new [] end def with_callbacks(kind, *args) - self.class._before_callbacks[kind].each { |c| send c, *args } + self.class._before_callbacks[kind].each { |c| send c, *args } # rubocop:disable GitlabSecurity/PublicSend yield - self.class._after_callbacks[kind].each { |c| send c, *args } + self.class._after_callbacks[kind].each { |c| send c, *args } # rubocop:disable GitlabSecurity/PublicSend end module ClassMethods def before_callback(kind, callback) - self._before_callbacks = self._before_callbacks. - merge kind => _before_callbacks[kind] + [callback] + self._before_callbacks = self._before_callbacks + .merge kind => _before_callbacks[kind] + [callback] end def after_callback(kind, callback) - self._after_callbacks = self._after_callbacks. - merge kind => _after_callbacks[kind] + [callback] + self._after_callbacks = self._after_callbacks + .merge kind => _after_callbacks[kind] + [callback] end end end diff --git a/lib/gitlab/ci/trace/chunked_file/concerns/errors.rb b/lib/gitlab/ci/trace/chunked_file/concerns/errors.rb index ccdb17005e29..0c75afde96b6 100644 --- a/lib/gitlab/ci/trace/chunked_file/concerns/errors.rb +++ b/lib/gitlab/ci/trace/chunked_file/concerns/errors.rb @@ -8,7 +8,6 @@ module Errors included do WriteError = Class.new(StandardError) - FailedToGetChunkError = Class.new(StandardError) end end end diff --git a/lib/gitlab/ci/trace/chunked_file/concerns/permissions.rb b/lib/gitlab/ci/trace/chunked_file/concerns/permissions.rb index 11d8fd0cdfc9..016b796afc26 100644 --- a/lib/gitlab/ci/trace/chunked_file/concerns/permissions.rb +++ b/lib/gitlab/ci/trace/chunked_file/concerns/permissions.rb @@ -6,12 +6,10 @@ module Concerns module Permissions extend ActiveSupport::Concern - WRITABLE_MODE = %w[a] - READABLE_MODE = %w[r +] + WRITABLE_MODE = %w[a].freeze + READABLE_MODE = %w[r +].freeze included do - PermissionError = Class.new(StandardError) - attr_reader :write_lock_uuid end @@ -20,7 +18,7 @@ def initialize(job_id, size, mode = 'rb') @write_lock_uuid = Gitlab::ExclusiveLease .new(write_lock_key(job_id), timeout: 1.hour.to_i).try_obtain - raise PermissionError, 'Already opened by another process' unless write_lock_uuid + raise IOError, 'Already opened by another process' unless write_lock_uuid end super diff --git a/spec/factories/ci/job_trace_chunks.rb b/spec/factories/ci/job_trace_chunks.rb index c7fe1921f3aa..f24e015f1861 100644 --- a/spec/factories/ci/job_trace_chunks.rb +++ b/spec/factories/ci/job_trace_chunks.rb @@ -1,7 +1,5 @@ -include ActionDispatch::TestProcess - FactoryBot.define do - factory :job_trace_chunk, class: Ci::JobTraceChunk do + factory :ci_job_trace_chunk, class: Ci::JobTraceChunk do job factory: :ci_build end end diff --git a/spec/lib/gitlab/ci/trace/chunked_file/live_trace_spec.rb b/spec/lib/gitlab/ci/trace/chunked_file/live_trace_spec.rb index ad91834f3201..c2c7fe1c8d32 100644 --- a/spec/lib/gitlab/ci/trace/chunked_file/live_trace_spec.rb +++ 
b/spec/lib/gitlab/ci/trace/chunked_file/live_trace_spec.rb @@ -10,7 +10,7 @@ let(:chunk_stores) do [Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis, - Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Database] + Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Database] end describe 'ChunkStores are Redis and Database', :partial_support do -- GitLab From f0ddad75e902ee5d58c9d84668ca9bd6b8de3a9b Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Tue, 3 Apr 2018 01:22:46 +0900 Subject: [PATCH 19/86] Fix archive method. Fix trace spec --- lib/gitlab/ci/trace.rb | 15 ++-- .../ci/trace/chunked_file/chunked_io.rb | 2 +- .../ci/trace/chunked_file/live_trace.rb | 16 ++-- spec/lib/gitlab/ci/trace_spec.rb | 84 +++++++++++++++++-- 4 files changed, 97 insertions(+), 20 deletions(-) diff --git a/lib/gitlab/ci/trace.rb b/lib/gitlab/ci/trace.rb index c4000cc787af..3dc4848c23da 100644 --- a/lib/gitlab/ci/trace.rb +++ b/lib/gitlab/ci/trace.rb @@ -54,7 +54,7 @@ def append(data, offset) end def exist? - trace_artifact&.exists? || current_path.present? || old_trace.present? + trace_artifact&.exists? || ChunkedFile::LiveTrace.exist?(job.id) || current_path.present? || old_trace.present? end def read @@ -62,7 +62,7 @@ def read if trace_artifact trace_artifact.open elsif ChunkedFile::LiveTrace.exist?(job.id) - ChunkedFile::LiveTrace.new(job.id, "rb") + ChunkedFile::LiveTrace.new(job.id, nil, "rb") elsif current_path File.open(current_path, "rb") elsif old_trace @@ -81,7 +81,7 @@ def write if current_path current_path else - ChunkedFile::LiveTrace.new(job.id, "a+b") + ChunkedFile::LiveTrace.new(job.id, nil, "a+b") end else File.open(ensure_path, "a+b") @@ -110,9 +110,12 @@ def archive! raise ArchiveError, 'Job is not finished yet' unless job.complete? if ChunkedFile::LiveTrace.exist?(job.id) - ChunkedFile::LiveTrace.open(job.id, 'a+b') do |stream| - archive_stream!(stream) - stream.delete + ChunkedFile::LiveTrace.new(job.id, nil, 'a+b') do |live_trace_stream| + StringIO.new(live_trace_stream.read, 'rb').tap do |stream| + archive_stream!(stream) + end + + live_trace_stream.delete end elsif current_path File.open(current_path) do |stream| diff --git a/lib/gitlab/ci/trace/chunked_file/chunked_io.rb b/lib/gitlab/ci/trace/chunked_file/chunked_io.rb index f3d3aae5a5b5..f9adbffc25a5 100644 --- a/lib/gitlab/ci/trace/chunked_file/chunked_io.rb +++ b/lib/gitlab/ci/trace/chunked_file/chunked_io.rb @@ -81,7 +81,7 @@ def each_line end end - def read(length = nil) + def read(length = nil, outbuf = nil) out = "" until eof? || (length && out.length >= length) diff --git a/lib/gitlab/ci/trace/chunked_file/live_trace.rb b/lib/gitlab/ci/trace/chunked_file/live_trace.rb index 1e1700b43c95..bf918fd4ace3 100644 --- a/lib/gitlab/ci/trace/chunked_file/live_trace.rb +++ b/lib/gitlab/ci/trace/chunked_file/live_trace.rb @@ -5,7 +5,7 @@ module ChunkedFile class LiveTrace < ChunkedIO class << self def exist?(job_id) - ChunkStore::Redis.chunks_count(job_id) > 0 || ChunkStore::Database.chunks_count(job_id) > 0 + ChunkedFile::ChunkStore::Redis.chunks_count(job_id) > 0 || ChunkedFile::ChunkStore::Database.chunks_count(job_id) > 0 end end @@ -14,7 +14,7 @@ def exist?(job_id) def stash_to_database(store) # Once data is filled into redis, move the data to database if store.filled? - ChunkStore::Database.open(job_id, chunk_index, params_for_store) do |to_store| + ChunkedFile::ChunkStore::Database.open(job_id, chunk_index, params_for_store) do |to_store| to_store.write!(store.get) store.delete! 
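            # NOTE: this is the Redis -> database hand-off: once a
            # Redis-backed chunk reports `filled?`, its payload is copied
            # into the database store and the Redis buffer key is deleted,
            # so only the trailing, still-growing chunk remains in Redis
            # (presumably wired up via the write_chunk callbacks in
            # Concerns::Callbacks).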
end @@ -33,22 +33,22 @@ def truncate(offset) end def delete - ChunkStore::Redis.delete_all(job_id) - ChunkStore::Database.delete_all(job_id) + ChunkedFile::ChunkStore::Redis.delete_all(job_id) + ChunkedFile::ChunkStore::Database.delete_all(job_id) end private def calculate_size(job_id) - ChunkStore::Redis.chunks_size(job_id) + - ChunkStore::Database.chunks_size(job_id) + ChunkedFile::ChunkStore::Redis.chunks_size(job_id) + + ChunkedFile::ChunkStore::Database.chunks_size(job_id) end def chunk_store if last_range.include?(tell) - ChunkStore::Redis + ChunkedFile::ChunkStore::Redis else - ChunkStore::Database + ChunkedFile::ChunkStore::Database end end diff --git a/spec/lib/gitlab/ci/trace_spec.rb b/spec/lib/gitlab/ci/trace_spec.rb index 3a9371ed2e8c..c246ce9cdf3e 100644 --- a/spec/lib/gitlab/ci/trace_spec.rb +++ b/spec/lib/gitlab/ci/trace_spec.rb @@ -1,9 +1,13 @@ require 'spec_helper' -describe Gitlab::Ci::Trace do +describe Gitlab::Ci::Trace, :clean_gitlab_redis_cache do let(:build) { create(:ci_build) } let(:trace) { described_class.new(build) } + before do + stub_feature_flags(ci_enable_live_trace: true) + end + describe "associations" do it { expect(trace).to respond_to(:job) } it { expect(trace).to delegate_method(:old_trace).to(:job) } @@ -403,6 +407,10 @@ describe '#archive!' do subject { trace.archive! } + before do + stub_feature_flags(ci_enable_live_trace: false) + end + shared_examples 'archive trace file' do it do expect { subject }.to change { Ci::JobArtifact.count }.by(1) @@ -455,11 +463,44 @@ end end + shared_examples 'archive trace file in ChunkedIO' do + it do + expect { subject }.to change { Ci::JobArtifact.count }.by(1) + + build.reload + expect(build.trace.exist?).to be_truthy + expect(build.job_artifacts_trace.file.exists?).to be_truthy + expect(build.job_artifacts_trace.file.filename).to eq('job.log') + expect(Gitlab::Ci::Trace::ChunkedFile::LiveTrace.exist?(build.id)).to be_falsy + expect(src_checksum) + .to eq(Digest::SHA256.file(build.job_artifacts_trace.file.path).hexdigest) + expect(build.job_artifacts_trace.file_sha256).to eq(src_checksum) + end + end + + shared_examples 'source trace in ChunkedIO stays intact' do |error:| + it do + expect { subject }.to raise_error(error) + + build.reload + expect(build.trace.exist?).to be_truthy + expect(build.job_artifacts_trace).to be_nil + Gitlab::Ci::Trace::ChunkedFile::LiveTrace.new(build.id, nil, 'rb') do |stream| + expect(stream.read).to eq(trace_raw) + end + end + end + context 'when job does not have trace artifact' do context 'when trace file stored in default path' do - let!(:build) { create(:ci_build, :success, :trace_live) } - let!(:src_path) { trace.read { |s| return s.path } } - let!(:src_checksum) { Digest::SHA256.file(src_path).hexdigest } + let(:build) { create(:ci_build, :success, :trace_live) } + let(:src_path) { trace.read { |s| return s.path } } + let(:src_checksum) { Digest::SHA256.file(src_path).hexdigest } + + before do + stub_feature_flags(ci_enable_live_trace: false) + build; src_path; src_checksum; # Initialize after set feature flag + end it_behaves_like 'archive trace file' @@ -485,9 +526,11 @@ context 'when trace is stored in database' do let(:build) { create(:ci_build, :success) } let(:trace_content) { 'Sample trace' } - let!(:src_checksum) { Digest::SHA256.hexdigest(trace_content) } + let(:src_checksum) { Digest::SHA256.hexdigest(trace_content) } before do + stub_feature_flags(ci_enable_live_trace: false) + build; trace_content; src_checksum; # Initialize after set feature flag 
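        # NOTE: `build`, `trace_content` and `src_checksum` are lazy `let`s;
        # referencing them above forces evaluation while the feature-flag
        # stub is in effect, rather than at first use inside an example.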
build.update_column(:trace, trace_content)
       end

@@ -533,6 +576,37 @@
         it_behaves_like 'archive trace in database'
       end
     end
+
+    context 'when trace is stored in ChunkedIO' do
+      let(:build) { create(:ci_build, :success, :trace_live) }
+      let(:trace_raw) { build.trace.raw }
+      let(:src_checksum) { Digest::SHA256.hexdigest(trace_raw) }
+
+      before do
+        stub_feature_flags(ci_enable_live_trace: true)
+        build; trace_raw; src_checksum; # Initialize after set feature flag
+      end
+
+      it_behaves_like 'archive trace file in ChunkedIO'
+
+      context 'when failed to create clone file' do
+        before do
+          allow(IO).to receive(:copy_stream).and_return(0)
+        end
+
+        it_behaves_like 'source trace in ChunkedIO stays intact', error: Gitlab::Ci::Trace::ArchiveError
+      end
+
+      context 'when failed to create job artifact record' do
+        before do
+          allow_any_instance_of(Ci::JobArtifact).to receive(:save).and_return(false)
+          allow_any_instance_of(Ci::JobArtifact).to receive_message_chain(:errors, :full_messages)
+            .and_return(%w[Error Error])
+        end
+
+        it_behaves_like 'source trace in ChunkedIO stays intact', error: ActiveRecord::RecordInvalid
+      end
+    end
   end

   context 'when job has trace artifact' do
-- 
GitLab

From 098fbac19985b0c9d96bab003d12f8614b332881 Mon Sep 17 00:00:00 2001
From: Shinya Maeda
Date: Tue, 3 Apr 2018 15:35:21 +0900
Subject: [PATCH 20/86] Add a guard logic not to hit chunk_store if unnecessary

---
 lib/gitlab/ci/trace/chunked_file/chunked_io.rb | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/lib/gitlab/ci/trace/chunked_file/chunked_io.rb b/lib/gitlab/ci/trace/chunked_file/chunked_io.rb
index f9adbffc25a5..de38771b210c 100644
--- a/lib/gitlab/ci/trace/chunked_file/chunked_io.rb
+++ b/lib/gitlab/ci/trace/chunked_file/chunked_io.rb
@@ -102,6 +102,8 @@ def readline

       until eof?
         data = get_chunk
+        break if data.empty?
+
         new_line = data.index("\n")

         if !new_line.nil?
@@ -150,6 +152,8 @@ def in_range?
     end

     def get_chunk
+      return '' unless size > 0
+
       unless in_range?
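      # NOTE: the `return '' unless size > 0` guard above short-circuits
      # reads on an empty trace, so a read on a job with no chunks never
      # opens a chunk store connection at all.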
chunk_store.open(job_id, chunk_index, params_for_store) do |store| @chunk = store.get -- GitLab From 91fe68a6af7652f1951f9fa21e3e2722fe09495a Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Tue, 3 Apr 2018 18:42:40 +0900 Subject: [PATCH 21/86] Use bytesize everywhere instead of length --- lib/gitlab/ci/trace.rb | 2 +- .../chunked_file/chunk_store/database.rb | 10 +- .../trace/chunked_file/chunk_store/redis.rb | 6 +- .../ci/trace/chunked_file/chunked_io.rb | 54 +- .../ci/trace/chunked_file/concerns/errors.rb | 1 + spec/fixtures/trace/sample_trace | 1244 +---------------- .../ci/trace/chunked_file/live_trace_spec.rb | 2 +- .../chunked_io_shared_examples.rb | 64 +- 8 files changed, 140 insertions(+), 1243 deletions(-) diff --git a/lib/gitlab/ci/trace.rb b/lib/gitlab/ci/trace.rb index 3dc4848c23da..b209377d4623 100644 --- a/lib/gitlab/ci/trace.rb +++ b/lib/gitlab/ci/trace.rb @@ -77,7 +77,7 @@ def read def write stream = Gitlab::Ci::Trace::Stream.new do - if Feature.enabled?('ci_enable_live_trace') + if Feature.enabled?('ci_enable_live_trace') || true if current_path current_path else diff --git a/lib/gitlab/ci/trace/chunked_file/chunk_store/database.rb b/lib/gitlab/ci/trace/chunked_file/chunk_store/database.rb index 3c2805e83f74..d3665031e9d7 100644 --- a/lib/gitlab/ci/trace/chunked_file/chunk_store/database.rb +++ b/lib/gitlab/ci/trace/chunked_file/chunk_store/database.rb @@ -27,7 +27,7 @@ def chunks_count(job_id) def chunks_size(job_id) ::Ci::JobTraceChunk.where(job_id: job_id).pluck('data') - .inject(0) { |sum, data| sum + data.length } + .inject(0) { |sum, data| sum + data.bytesize } end def delete_all(job_id) @@ -54,19 +54,19 @@ def get end def size - job_trace_chunk.data&.length || 0 + job_trace_chunk.data&.bytesize || 0 end def write!(data) - raise NotImplementedError, 'Partial writing is not supported' unless params[:buffer_size] == data&.length + raise NotImplementedError, 'Partial writing is not supported' unless params[:buffer_size] == data&.bytesize raise NotImplementedError, 'UPDATE (Overwriting data) is not supported' if job_trace_chunk.data - puts "#{self.class.name} - #{__callee__}: data.length: #{data.length.inspect} params[:chunk_index]: #{params[:chunk_index]}" + puts "#{self.class.name} - #{__callee__}: data.bytesize: #{data.bytesize.inspect} params[:chunk_index]: #{params[:chunk_index]}" job_trace_chunk.data = data job_trace_chunk.save! - data.length + data.bytesize end def append!(data) diff --git a/lib/gitlab/ci/trace/chunked_file/chunk_store/redis.rb b/lib/gitlab/ci/trace/chunked_file/chunk_store/redis.rb index dc3756a6339b..5d3b43f82916 100644 --- a/lib/gitlab/ci/trace/chunked_file/chunk_store/redis.rb +++ b/lib/gitlab/ci/trace/chunked_file/chunk_store/redis.rb @@ -83,7 +83,7 @@ def size def write!(data) raise ArgumentError, 'Could not write empty data' unless data.present? - puts "#{self.class.name} - #{__callee__}: data.length: #{data.length.inspect} params[:chunk_index]: #{params[:chunk_index]}" + puts "#{self.class.name} - #{__callee__}: data.bytesize: #{data.bytesize.inspect} params[:chunk_index]: #{params[:chunk_index]}" Gitlab::Redis::Cache.with do |redis| unless redis.set(buffer_key, data) == 'OK' raise WriteError, 'Failed to write' @@ -96,7 +96,7 @@ def write!(data) def append!(data) raise ArgumentError, 'Could not write empty data' unless data.present? 
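          # NOTE: Redis STRLEN and APPEND work on bytes, while String#length
          # counts characters; the two diverge once a trace contains
          # multi-byte UTF-8, hence the switch below from length to bytesize
          # for every size that is compared against Redis return values.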
- puts "#{self.class.name} - #{__callee__}: data.length: #{data.length.inspect} params[:chunk_index]: #{params[:chunk_index]}" + puts "#{self.class.name} - #{__callee__}: data.bytesize: #{data.bytesize.inspect} params[:chunk_index]: #{params[:chunk_index]}" Gitlab::Redis::Cache.with do |redis| raise BufferKeyNotFoundError, 'Buffer key is not found' unless redis.exists(buffer_key) @@ -104,7 +104,7 @@ def append!(data) new_size = redis.append(buffer_key, data) appended_size = new_size - original_size - raise WriteError, 'Failed to append' unless appended_size == data.length + raise WriteError, 'Failed to append' unless appended_size == data.bytesize appended_size end diff --git a/lib/gitlab/ci/trace/chunked_file/chunked_io.rb b/lib/gitlab/ci/trace/chunked_file/chunked_io.rb index de38771b210c..5767d1487cd9 100644 --- a/lib/gitlab/ci/trace/chunked_file/chunked_io.rb +++ b/lib/gitlab/ci/trace/chunked_file/chunked_io.rb @@ -84,7 +84,7 @@ def each_line def read(length = nil, outbuf = nil) out = "" - until eof? || (length && out.length >= length) + until eof? || (length && out.bytesize >= length) data = get_chunk break if data.empty? @@ -92,7 +92,7 @@ def read(length = nil, outbuf = nil) @tell += data.bytesize end - out = out[0, length] if length && out.length > length + out = out.byteslice(0, length) if length && out.bytesize > length out end @@ -104,15 +104,15 @@ def readline data = get_chunk break if data.empty? - new_line = data.index("\n") + new_line_pos = byte_position(data, "\n") - if !new_line.nil? - out << data[0..new_line] - @tell += new_line + 1 - break - else + if new_line_pos.nil? out << data @tell += data.bytesize + else + out << data.byteslice(0..new_line_pos) + @tell += new_line_pos + 1 + break end end @@ -123,7 +123,7 @@ def write(data) raise ArgumentError, 'Could not write empty data' unless data.present? if mode.include?('w') - write_as_overwrite(data) + raise NotImplementedError, "Overwrite is not supported" elsif mode.include?('a') write_as_append(data) end @@ -157,27 +157,26 @@ def get_chunk unless in_range? chunk_store.open(job_id, chunk_index, params_for_store) do |store| @chunk = store.get - @chunk_range = (chunk_start...(chunk_start + chunk.length)) + + raise ReadError, 'Could not get a chunk' unless chunk && chunk.present? 
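          # NOTE: @chunk_range below is kept in byte coordinates so in_range?
          # can compare it with tell directly; while tell stays inside this
          # range, reads are served from the cached @chunk via byteslice
          # without another round-trip to the store.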
+ + @chunk_range = (chunk_start...(chunk_start + chunk.bytesize)) end end - @chunk[chunk_offset..buffer_size] - end - - def write_as_overwrite(data) - raise NotImplementedError, "Overwrite is not supported" + @chunk.byteslice(chunk_offset, buffer_size) end def write_as_append(data) @tell = size - data_size = data.size + data_size = data.bytesize new_tell = tell + data_size data_offset = 0 until tell == new_tell writable_size = buffer_size - chunk_offset - writable_data = data[data_offset...(data_offset + writable_size)] + writable_data = data.byteslice(data_offset, writable_size) written_size = write_chunk(writable_data) data_offset += written_size @@ -199,7 +198,7 @@ def write_chunk(data) store.write!(data) end - raise WriteError, 'Written size mismatch' unless data.length == written_size + raise WriteError, 'Written size mismatch' unless data.bytesize == written_size end end @@ -249,6 +248,25 @@ def buffer_size def calculate_size(job_id) chunk_store.chunks_size(job_id) end + + def byte_position(data, pattern_byte) + index_as_string = data.index(pattern_byte) + return nil unless index_as_string + + if data.getbyte(index_as_string) == pattern_byte.getbyte(0) + index_as_string + else + data2 = data.byteslice(index_as_string, 100) + additional_pos = 0 + data2.each_byte do |b| + break if b == pattern_byte.getbyte(0) + + additional_pos += 1 + end + + index_as_string + additional_pos + end + end end end end diff --git a/lib/gitlab/ci/trace/chunked_file/concerns/errors.rb b/lib/gitlab/ci/trace/chunked_file/concerns/errors.rb index 0c75afde96b6..5fba9605585f 100644 --- a/lib/gitlab/ci/trace/chunked_file/concerns/errors.rb +++ b/lib/gitlab/ci/trace/chunked_file/concerns/errors.rb @@ -8,6 +8,7 @@ module Errors included do WriteError = Class.new(StandardError) + ReadError = Class.new(StandardError) end end end diff --git a/spec/fixtures/trace/sample_trace b/spec/fixtures/trace/sample_trace index 55fcb9d27568..c78d2d671e43 100644 --- a/spec/fixtures/trace/sample_trace +++ b/spec/fixtures/trace/sample_trace @@ -1,1185 +1,63 @@ -Running with gitlab-runner 10.4.0 (857480b6) - on docker-auto-scale-com (9a6801bd) -Using Docker executor with image dev.gitlab.org:5005/gitlab/gitlab-build-images:ruby-2.3.6-golang-1.9-git-2.14-chrome-63.0-node-8.x-yarn-1.2-postgresql-9.6 ... -Starting service postgres:9.2 ... -Pulling docker image postgres:9.2 ... -Using docker image postgres:9.2 ID=sha256:18cdbca56093c841d28e629eb8acd4224afe0aa4c57c839351fc181888b8a470 for postgres service... -Starting service redis:alpine ... -Pulling docker image redis:alpine ... -Using docker image redis:alpine ID=sha256:cb1ec54b370d4a91dff57d00f91fd880dc710160a58440adaa133e0f84ae999d for redis service... -Waiting for services to be up and running... -Using docker image sha256:3006a02a5a6f0a116358a13bbc46ee46fb2471175efd5b7f9b1c22345ec2a8e9 for predefined container... -Pulling docker image dev.gitlab.org:5005/gitlab/gitlab-build-images:ruby-2.3.6-golang-1.9-git-2.14-chrome-63.0-node-8.x-yarn-1.2-postgresql-9.6 ... -Using docker image dev.gitlab.org:5005/gitlab/gitlab-build-images:ruby-2.3.6-golang-1.9-git-2.14-chrome-63.0-node-8.x-yarn-1.2-postgresql-9.6 ID=sha256:1f59be408f12738509ffe4177d65e9de6391f32461de83d9d45f58517b30af99 for build container... -section_start:1517486886:prepare_script -Running on runner-9a6801bd-project-13083-concurrent-0 via runner-9a6801bd-gsrm-1517484168-a8449153... 
-section_end:1517486887:prepare_script -section_start:1517486887:get_sources -Fetching changes for 42624-gitaly-bundle-isolation-not-working-in-ci with git depth set to 20... -Removing .gitlab_shell_secret -Removing .gitlab_workhorse_secret -Removing .yarn-cache/ -Removing config/database.yml -Removing config/gitlab.yml -Removing config/redis.cache.yml -Removing config/redis.queues.yml -Removing config/redis.shared_state.yml -Removing config/resque.yml -Removing config/secrets.yml -Removing coverage/ -Removing knapsack/ -Removing log/api_json.log -Removing log/application.log -Removing log/gitaly-test.log -Removing log/githost.log -Removing log/grpc.log -Removing log/test_json.log -Removing node_modules/ -Removing public/assets/ -Removing rspec_flaky/ -Removing shared/tmp/ -Removing tmp/tests/ -Removing vendor/ruby/ -HEAD is now at 4cea24f Converted todos.js to axios -From https://gitlab.com/gitlab-org/gitlab-ce - * [new branch] 42624-gitaly-bundle-isolation-not-working-in-ci -> origin/42624-gitaly-bundle-isolation-not-working-in-ci -Checking out f42a5e24 as 42624-gitaly-bundle-isolation-not-working-in-ci... +Running with gitlab-runner 10.0.2 (a9a76a50) + on ShinyaMaedas-MacBook-Pro.local (e1e5600d) +Using Docker executor with image ruby:2.1 ... +Using docker image sha256:35c04f14f9926d1c8c68927cb43f69435fda36ecbaa3ca6f92218205363a2b99 for predefined container... +Pulling docker image ruby:2.1 ... +Using docker image ruby:2.1 ID=sha256:223d1eaa9523fa64e78f5a92b701c9c11cbc507f0ff62246dbbacdae395ffea3 for build container... +Running on runner-e1e5600d-project-64-concurrent-0 via ShinyaMaedasMBP... +Fetching changes... +Removing index.html +HEAD is now at 59a8e85 Update .gitlab-ci.yml +Checking out 59a8e85d as master... Skipping Git submodules setup -section_end:1517486896:get_sources -section_start:1517486896:restore_cache -Checking cache for ruby-2.3.6-with-yarn... -Downloading cache.zip from http://runners-cache-5-internal.gitlab.com:444/runner/project/13083/ruby-2.3.6-with-yarn -Successfully extracted cache -section_end:1517486919:restore_cache -section_start:1517486919:download_artifacts -Downloading artifacts for retrieve-tests-metadata (50551658)... -Downloading artifacts from coordinator... ok  id=50551658 responseStatus=200 OK token=HhF7y_1X -Downloading artifacts for compile-assets (50551659)... -Downloading artifacts from coordinator... ok  id=50551659 responseStatus=200 OK token=wTz6JrCP -Downloading artifacts for setup-test-env (50551660)... -Downloading artifacts from coordinator... 
ok  id=50551660 responseStatus=200 OK token=DTGgeVF5 -WARNING: tmp/tests/gitlab-shell/.gitlab_shell_secret: chmod tmp/tests/gitlab-shell/.gitlab_shell_secret: no such file or directory (suppressing repeats) -section_end:1517486934:download_artifacts -section_start:1517486934:build_script -$ bundle --version -Bundler version 1.16.1 -$ source scripts/utils.sh -$ source scripts/prepare_build.sh -The Gemfile's dependencies are satisfied -Successfully installed knapsack-1.15.0 -1 gem installed -NOTICE: database "gitlabhq_test" does not exist, skipping -DROP DATABASE -CREATE DATABASE -CREATE ROLE -GRANT --- enable_extension("plpgsql") - -> 0.0156s --- enable_extension("pg_trgm") - -> 0.0156s --- create_table("abuse_reports", {:force=>:cascade}) - -> 0.0119s --- create_table("appearances", {:force=>:cascade}) - -> 0.0065s --- create_table("application_settings", {:force=>:cascade}) - -> 0.0382s --- create_table("audit_events", {:force=>:cascade}) - -> 0.0056s --- add_index("audit_events", ["entity_id", "entity_type"], {:name=>"index_audit_events_on_entity_id_and_entity_type", :using=>:btree}) - -> 0.0040s --- create_table("award_emoji", {:force=>:cascade}) - -> 0.0058s --- add_index("award_emoji", ["awardable_type", "awardable_id"], {:name=>"index_award_emoji_on_awardable_type_and_awardable_id", :using=>:btree}) - -> 0.0068s --- add_index("award_emoji", ["user_id", "name"], {:name=>"index_award_emoji_on_user_id_and_name", :using=>:btree}) - -> 0.0043s --- create_table("boards", {:force=>:cascade}) - -> 0.0049s --- add_index("boards", ["project_id"], {:name=>"index_boards_on_project_id", :using=>:btree}) - -> 0.0056s --- create_table("broadcast_messages", {:force=>:cascade}) - -> 0.0056s --- add_index("broadcast_messages", ["starts_at", "ends_at", "id"], {:name=>"index_broadcast_messages_on_starts_at_and_ends_at_and_id", :using=>:btree}) - -> 0.0041s --- create_table("chat_names", {:force=>:cascade}) - -> 0.0056s --- add_index("chat_names", ["service_id", "team_id", "chat_id"], {:name=>"index_chat_names_on_service_id_and_team_id_and_chat_id", :unique=>true, :using=>:btree}) - -> 0.0039s --- add_index("chat_names", ["user_id", "service_id"], {:name=>"index_chat_names_on_user_id_and_service_id", :unique=>true, :using=>:btree}) - -> 0.0036s --- create_table("chat_teams", {:force=>:cascade}) - -> 0.0068s --- add_index("chat_teams", ["namespace_id"], {:name=>"index_chat_teams_on_namespace_id", :unique=>true, :using=>:btree}) - -> 0.0098s --- create_table("ci_build_trace_section_names", {:force=>:cascade}) - -> 0.0048s --- add_index("ci_build_trace_section_names", ["project_id", "name"], {:name=>"index_ci_build_trace_section_names_on_project_id_and_name", :unique=>true, :using=>:btree}) - -> 0.0035s --- create_table("ci_build_trace_sections", {:force=>:cascade}) - -> 0.0040s --- add_index("ci_build_trace_sections", ["build_id", "section_name_id"], {:name=>"index_ci_build_trace_sections_on_build_id_and_section_name_id", :unique=>true, :using=>:btree}) - -> 0.0035s --- add_index("ci_build_trace_sections", ["project_id"], {:name=>"index_ci_build_trace_sections_on_project_id", :using=>:btree}) - -> 0.0033s --- create_table("ci_builds", {:force=>:cascade}) - -> 0.0062s --- add_index("ci_builds", ["auto_canceled_by_id"], {:name=>"index_ci_builds_on_auto_canceled_by_id", :using=>:btree}) - -> 0.0035s --- add_index("ci_builds", ["commit_id", "stage_idx", "created_at"], {:name=>"index_ci_builds_on_commit_id_and_stage_idx_and_created_at", :using=>:btree}) - -> 0.0032s --- add_index("ci_builds", ["commit_id", 
"status", "type"], {:name=>"index_ci_builds_on_commit_id_and_status_and_type", :using=>:btree}) - -> 0.0032s --- add_index("ci_builds", ["commit_id", "type", "name", "ref"], {:name=>"index_ci_builds_on_commit_id_and_type_and_name_and_ref", :using=>:btree}) - -> 0.0035s --- add_index("ci_builds", ["commit_id", "type", "ref"], {:name=>"index_ci_builds_on_commit_id_and_type_and_ref", :using=>:btree}) - -> 0.0042s --- add_index("ci_builds", ["project_id", "id"], {:name=>"index_ci_builds_on_project_id_and_id", :using=>:btree}) - -> 0.0031s --- add_index("ci_builds", ["protected"], {:name=>"index_ci_builds_on_protected", :using=>:btree}) - -> 0.0031s --- add_index("ci_builds", ["runner_id"], {:name=>"index_ci_builds_on_runner_id", :using=>:btree}) - -> 0.0033s --- add_index("ci_builds", ["stage_id"], {:name=>"index_ci_builds_on_stage_id", :using=>:btree}) - -> 0.0035s --- add_index("ci_builds", ["status", "type", "runner_id"], {:name=>"index_ci_builds_on_status_and_type_and_runner_id", :using=>:btree}) - -> 0.0031s --- add_index("ci_builds", ["status"], {:name=>"index_ci_builds_on_status", :using=>:btree}) - -> 0.0032s --- add_index("ci_builds", ["token"], {:name=>"index_ci_builds_on_token", :unique=>true, :using=>:btree}) - -> 0.0028s --- add_index("ci_builds", ["updated_at"], {:name=>"index_ci_builds_on_updated_at", :using=>:btree}) - -> 0.0047s --- add_index("ci_builds", ["user_id"], {:name=>"index_ci_builds_on_user_id", :using=>:btree}) - -> 0.0029s --- create_table("ci_group_variables", {:force=>:cascade}) - -> 0.0055s --- add_index("ci_group_variables", ["group_id", "key"], {:name=>"index_ci_group_variables_on_group_id_and_key", :unique=>true, :using=>:btree}) - -> 0.0028s --- create_table("ci_job_artifacts", {:force=>:cascade}) - -> 0.0048s --- add_index("ci_job_artifacts", ["job_id", "file_type"], {:name=>"index_ci_job_artifacts_on_job_id_and_file_type", :unique=>true, :using=>:btree}) - -> 0.0027s --- add_index("ci_job_artifacts", ["project_id"], {:name=>"index_ci_job_artifacts_on_project_id", :using=>:btree}) - -> 0.0028s --- create_table("ci_pipeline_schedule_variables", {:force=>:cascade}) - -> 0.0044s --- add_index("ci_pipeline_schedule_variables", ["pipeline_schedule_id", "key"], {:name=>"index_ci_pipeline_schedule_variables_on_schedule_id_and_key", :unique=>true, :using=>:btree}) - -> 0.0032s --- create_table("ci_pipeline_schedules", {:force=>:cascade}) - -> 0.0047s --- add_index("ci_pipeline_schedules", ["next_run_at", "active"], {:name=>"index_ci_pipeline_schedules_on_next_run_at_and_active", :using=>:btree}) - -> 0.0029s --- add_index("ci_pipeline_schedules", ["project_id"], {:name=>"index_ci_pipeline_schedules_on_project_id", :using=>:btree}) - -> 0.0028s --- create_table("ci_pipeline_variables", {:force=>:cascade}) - -> 0.0045s --- add_index("ci_pipeline_variables", ["pipeline_id", "key"], {:name=>"index_ci_pipeline_variables_on_pipeline_id_and_key", :unique=>true, :using=>:btree}) - -> 0.0030s --- create_table("ci_pipelines", {:force=>:cascade}) - -> 0.0057s --- add_index("ci_pipelines", ["auto_canceled_by_id"], {:name=>"index_ci_pipelines_on_auto_canceled_by_id", :using=>:btree}) - -> 0.0030s --- add_index("ci_pipelines", ["pipeline_schedule_id"], {:name=>"index_ci_pipelines_on_pipeline_schedule_id", :using=>:btree}) - -> 0.0031s --- add_index("ci_pipelines", ["project_id", "ref", "status", "id"], {:name=>"index_ci_pipelines_on_project_id_and_ref_and_status_and_id", :using=>:btree}) - -> 0.0032s --- add_index("ci_pipelines", ["project_id", "sha"], 
{:name=>"index_ci_pipelines_on_project_id_and_sha", :using=>:btree}) - -> 0.0032s --- add_index("ci_pipelines", ["project_id"], {:name=>"index_ci_pipelines_on_project_id", :using=>:btree}) - -> 0.0035s --- add_index("ci_pipelines", ["status"], {:name=>"index_ci_pipelines_on_status", :using=>:btree}) - -> 0.0032s --- add_index("ci_pipelines", ["user_id"], {:name=>"index_ci_pipelines_on_user_id", :using=>:btree}) - -> 0.0029s --- create_table("ci_runner_projects", {:force=>:cascade}) - -> 0.0035s --- add_index("ci_runner_projects", ["project_id"], {:name=>"index_ci_runner_projects_on_project_id", :using=>:btree}) - -> 0.0029s --- add_index("ci_runner_projects", ["runner_id"], {:name=>"index_ci_runner_projects_on_runner_id", :using=>:btree}) - -> 0.0028s --- create_table("ci_runners", {:force=>:cascade}) - -> 0.0059s --- add_index("ci_runners", ["contacted_at"], {:name=>"index_ci_runners_on_contacted_at", :using=>:btree}) - -> 0.0030s --- add_index("ci_runners", ["is_shared"], {:name=>"index_ci_runners_on_is_shared", :using=>:btree}) - -> 0.0030s --- add_index("ci_runners", ["locked"], {:name=>"index_ci_runners_on_locked", :using=>:btree}) - -> 0.0030s --- add_index("ci_runners", ["token"], {:name=>"index_ci_runners_on_token", :using=>:btree}) - -> 0.0029s --- create_table("ci_stages", {:force=>:cascade}) - -> 0.0046s --- add_index("ci_stages", ["pipeline_id", "name"], {:name=>"index_ci_stages_on_pipeline_id_and_name", :using=>:btree}) - -> 0.0031s --- add_index("ci_stages", ["pipeline_id"], {:name=>"index_ci_stages_on_pipeline_id", :using=>:btree}) - -> 0.0030s --- add_index("ci_stages", ["project_id"], {:name=>"index_ci_stages_on_project_id", :using=>:btree}) - -> 0.0028s --- create_table("ci_trigger_requests", {:force=>:cascade}) - -> 0.0058s --- add_index("ci_trigger_requests", ["commit_id"], {:name=>"index_ci_trigger_requests_on_commit_id", :using=>:btree}) - -> 0.0031s --- create_table("ci_triggers", {:force=>:cascade}) - -> 0.0043s --- add_index("ci_triggers", ["project_id"], {:name=>"index_ci_triggers_on_project_id", :using=>:btree}) - -> 0.0033s --- create_table("ci_variables", {:force=>:cascade}) - -> 0.0059s --- add_index("ci_variables", ["project_id", "key", "environment_scope"], {:name=>"index_ci_variables_on_project_id_and_key_and_environment_scope", :unique=>true, :using=>:btree}) - -> 0.0031s --- create_table("cluster_platforms_kubernetes", {:force=>:cascade}) - -> 0.0053s --- add_index("cluster_platforms_kubernetes", ["cluster_id"], {:name=>"index_cluster_platforms_kubernetes_on_cluster_id", :unique=>true, :using=>:btree}) - -> 0.0028s --- create_table("cluster_projects", {:force=>:cascade}) - -> 0.0032s --- add_index("cluster_projects", ["cluster_id"], {:name=>"index_cluster_projects_on_cluster_id", :using=>:btree}) - -> 0.0035s --- add_index("cluster_projects", ["project_id"], {:name=>"index_cluster_projects_on_project_id", :using=>:btree}) - -> 0.0030s --- create_table("cluster_providers_gcp", {:force=>:cascade}) - -> 0.0051s --- add_index("cluster_providers_gcp", ["cluster_id"], {:name=>"index_cluster_providers_gcp_on_cluster_id", :unique=>true, :using=>:btree}) - -> 0.0034s --- create_table("clusters", {:force=>:cascade}) - -> 0.0052s --- add_index("clusters", ["enabled"], {:name=>"index_clusters_on_enabled", :using=>:btree}) - -> 0.0031s --- add_index("clusters", ["user_id"], {:name=>"index_clusters_on_user_id", :using=>:btree}) - -> 0.0028s --- create_table("clusters_applications_helm", {:force=>:cascade}) - -> 0.0045s --- create_table("clusters_applications_ingress", 
{:force=>:cascade}) - -> 0.0044s --- create_table("clusters_applications_prometheus", {:force=>:cascade}) - -> 0.0047s --- create_table("container_repositories", {:force=>:cascade}) - -> 0.0050s --- add_index("container_repositories", ["project_id", "name"], {:name=>"index_container_repositories_on_project_id_and_name", :unique=>true, :using=>:btree}) - -> 0.0032s --- add_index("container_repositories", ["project_id"], {:name=>"index_container_repositories_on_project_id", :using=>:btree}) - -> 0.0032s --- create_table("conversational_development_index_metrics", {:force=>:cascade}) - -> 0.0076s --- create_table("deploy_keys_projects", {:force=>:cascade}) - -> 0.0037s --- add_index("deploy_keys_projects", ["project_id"], {:name=>"index_deploy_keys_projects_on_project_id", :using=>:btree}) - -> 0.0032s --- create_table("deployments", {:force=>:cascade}) - -> 0.0049s --- add_index("deployments", ["created_at"], {:name=>"index_deployments_on_created_at", :using=>:btree}) - -> 0.0034s --- add_index("deployments", ["environment_id", "id"], {:name=>"index_deployments_on_environment_id_and_id", :using=>:btree}) - -> 0.0028s --- add_index("deployments", ["environment_id", "iid", "project_id"], {:name=>"index_deployments_on_environment_id_and_iid_and_project_id", :using=>:btree}) - -> 0.0029s --- add_index("deployments", ["project_id", "iid"], {:name=>"index_deployments_on_project_id_and_iid", :unique=>true, :using=>:btree}) - -> 0.0032s --- create_table("emails", {:force=>:cascade}) - -> 0.0046s --- add_index("emails", ["confirmation_token"], {:name=>"index_emails_on_confirmation_token", :unique=>true, :using=>:btree}) - -> 0.0030s --- add_index("emails", ["email"], {:name=>"index_emails_on_email", :unique=>true, :using=>:btree}) - -> 0.0035s --- add_index("emails", ["user_id"], {:name=>"index_emails_on_user_id", :using=>:btree}) - -> 0.0028s --- create_table("environments", {:force=>:cascade}) - -> 0.0052s --- add_index("environments", ["project_id", "name"], {:name=>"index_environments_on_project_id_and_name", :unique=>true, :using=>:btree}) - -> 0.0031s --- add_index("environments", ["project_id", "slug"], {:name=>"index_environments_on_project_id_and_slug", :unique=>true, :using=>:btree}) - -> 0.0028s --- create_table("events", {:force=>:cascade}) - -> 0.0046s --- add_index("events", ["action"], {:name=>"index_events_on_action", :using=>:btree}) - -> 0.0032s --- add_index("events", ["author_id"], {:name=>"index_events_on_author_id", :using=>:btree}) - -> 0.0027s --- add_index("events", ["project_id", "id"], {:name=>"index_events_on_project_id_and_id", :using=>:btree}) - -> 0.0027s --- add_index("events", ["target_type", "target_id"], {:name=>"index_events_on_target_type_and_target_id", :using=>:btree}) - -> 0.0027s --- create_table("feature_gates", {:force=>:cascade}) - -> 0.0046s --- add_index("feature_gates", ["feature_key", "key", "value"], {:name=>"index_feature_gates_on_feature_key_and_key_and_value", :unique=>true, :using=>:btree}) - -> 0.0031s --- create_table("features", {:force=>:cascade}) - -> 0.0041s --- add_index("features", ["key"], {:name=>"index_features_on_key", :unique=>true, :using=>:btree}) - -> 0.0030s --- create_table("fork_network_members", {:force=>:cascade}) - -> 0.0033s --- add_index("fork_network_members", ["fork_network_id"], {:name=>"index_fork_network_members_on_fork_network_id", :using=>:btree}) - -> 0.0033s --- add_index("fork_network_members", ["project_id"], {:name=>"index_fork_network_members_on_project_id", :unique=>true, :using=>:btree}) - -> 0.0029s --- 
create_table("fork_networks", {:force=>:cascade}) - -> 0.0049s --- add_index("fork_networks", ["root_project_id"], {:name=>"index_fork_networks_on_root_project_id", :unique=>true, :using=>:btree}) - -> 0.0029s --- create_table("forked_project_links", {:force=>:cascade}) - -> 0.0032s --- add_index("forked_project_links", ["forked_to_project_id"], {:name=>"index_forked_project_links_on_forked_to_project_id", :unique=>true, :using=>:btree}) - -> 0.0030s --- create_table("gcp_clusters", {:force=>:cascade}) - -> 0.0074s --- add_index("gcp_clusters", ["project_id"], {:name=>"index_gcp_clusters_on_project_id", :unique=>true, :using=>:btree}) - -> 0.0030s --- create_table("gpg_key_subkeys", {:force=>:cascade}) - -> 0.0042s --- add_index("gpg_key_subkeys", ["fingerprint"], {:name=>"index_gpg_key_subkeys_on_fingerprint", :unique=>true, :using=>:btree}) - -> 0.0029s --- add_index("gpg_key_subkeys", ["gpg_key_id"], {:name=>"index_gpg_key_subkeys_on_gpg_key_id", :using=>:btree}) - -> 0.0032s --- add_index("gpg_key_subkeys", ["keyid"], {:name=>"index_gpg_key_subkeys_on_keyid", :unique=>true, :using=>:btree}) - -> 0.0027s --- create_table("gpg_keys", {:force=>:cascade}) - -> 0.0042s --- add_index("gpg_keys", ["fingerprint"], {:name=>"index_gpg_keys_on_fingerprint", :unique=>true, :using=>:btree}) - -> 0.0032s --- add_index("gpg_keys", ["primary_keyid"], {:name=>"index_gpg_keys_on_primary_keyid", :unique=>true, :using=>:btree}) - -> 0.0026s --- add_index("gpg_keys", ["user_id"], {:name=>"index_gpg_keys_on_user_id", :using=>:btree}) - -> 0.0028s --- create_table("gpg_signatures", {:force=>:cascade}) - -> 0.0054s --- add_index("gpg_signatures", ["commit_sha"], {:name=>"index_gpg_signatures_on_commit_sha", :unique=>true, :using=>:btree}) - -> 0.0029s --- add_index("gpg_signatures", ["gpg_key_id"], {:name=>"index_gpg_signatures_on_gpg_key_id", :using=>:btree}) - -> 0.0026s --- add_index("gpg_signatures", ["gpg_key_primary_keyid"], {:name=>"index_gpg_signatures_on_gpg_key_primary_keyid", :using=>:btree}) - -> 0.0029s --- add_index("gpg_signatures", ["gpg_key_subkey_id"], {:name=>"index_gpg_signatures_on_gpg_key_subkey_id", :using=>:btree}) - -> 0.0032s --- add_index("gpg_signatures", ["project_id"], {:name=>"index_gpg_signatures_on_project_id", :using=>:btree}) - -> 0.0028s --- create_table("group_custom_attributes", {:force=>:cascade}) - -> 0.0044s --- add_index("group_custom_attributes", ["group_id", "key"], {:name=>"index_group_custom_attributes_on_group_id_and_key", :unique=>true, :using=>:btree}) - -> 0.0032s --- add_index("group_custom_attributes", ["key", "value"], {:name=>"index_group_custom_attributes_on_key_and_value", :using=>:btree}) - -> 0.0028s --- create_table("identities", {:force=>:cascade}) - -> 0.0043s --- add_index("identities", ["user_id"], {:name=>"index_identities_on_user_id", :using=>:btree}) - -> 0.0034s --- create_table("issue_assignees", {:id=>false, :force=>:cascade}) - -> 0.0013s --- add_index("issue_assignees", ["issue_id", "user_id"], {:name=>"index_issue_assignees_on_issue_id_and_user_id", :unique=>true, :using=>:btree}) - -> 0.0028s --- add_index("issue_assignees", ["user_id"], {:name=>"index_issue_assignees_on_user_id", :using=>:btree}) - -> 0.0029s --- create_table("issue_metrics", {:force=>:cascade}) - -> 0.0032s --- add_index("issue_metrics", ["issue_id"], {:name=>"index_issue_metrics", :using=>:btree}) - -> 0.0029s --- create_table("issues", {:force=>:cascade}) - -> 0.0051s --- add_index("issues", ["author_id"], {:name=>"index_issues_on_author_id", :using=>:btree}) - -> 
0.0028s --- add_index("issues", ["confidential"], {:name=>"index_issues_on_confidential", :using=>:btree}) - -> 0.0029s --- add_index("issues", ["description"], {:name=>"index_issues_on_description_trigram", :using=>:gin, :opclasses=>{"description"=>"gin_trgm_ops"}}) - -> 0.0022s --- add_index("issues", ["milestone_id"], {:name=>"index_issues_on_milestone_id", :using=>:btree}) - -> 0.0027s --- add_index("issues", ["moved_to_id"], {:name=>"index_issues_on_moved_to_id", :where=>"(moved_to_id IS NOT NULL)", :using=>:btree}) - -> 0.0030s --- add_index("issues", ["project_id", "created_at", "id", "state"], {:name=>"index_issues_on_project_id_and_created_at_and_id_and_state", :using=>:btree}) - -> 0.0039s --- add_index("issues", ["project_id", "due_date", "id", "state"], {:name=>"idx_issues_on_project_id_and_due_date_and_id_and_state_partial", :where=>"(due_date IS NOT NULL)", :using=>:btree}) - -> 0.0031s --- add_index("issues", ["project_id", "iid"], {:name=>"index_issues_on_project_id_and_iid", :unique=>true, :using=>:btree}) - -> 0.0032s --- add_index("issues", ["project_id", "updated_at", "id", "state"], {:name=>"index_issues_on_project_id_and_updated_at_and_id_and_state", :using=>:btree}) - -> 0.0035s --- add_index("issues", ["relative_position"], {:name=>"index_issues_on_relative_position", :using=>:btree}) - -> 0.0030s --- add_index("issues", ["state"], {:name=>"index_issues_on_state", :using=>:btree}) - -> 0.0027s --- add_index("issues", ["title"], {:name=>"index_issues_on_title_trigram", :using=>:gin, :opclasses=>{"title"=>"gin_trgm_ops"}}) - -> 0.0021s --- add_index("issues", ["updated_at"], {:name=>"index_issues_on_updated_at", :using=>:btree}) - -> 0.0030s --- add_index("issues", ["updated_by_id"], {:name=>"index_issues_on_updated_by_id", :where=>"(updated_by_id IS NOT NULL)", :using=>:btree}) - -> 0.0028s --- create_table("keys", {:force=>:cascade}) - -> 0.0048s --- add_index("keys", ["fingerprint"], {:name=>"index_keys_on_fingerprint", :unique=>true, :using=>:btree}) - -> 0.0028s --- add_index("keys", ["user_id"], {:name=>"index_keys_on_user_id", :using=>:btree}) - -> 0.0029s --- create_table("label_links", {:force=>:cascade}) - -> 0.0041s --- add_index("label_links", ["label_id"], {:name=>"index_label_links_on_label_id", :using=>:btree}) - -> 0.0027s --- add_index("label_links", ["target_id", "target_type"], {:name=>"index_label_links_on_target_id_and_target_type", :using=>:btree}) - -> 0.0028s --- create_table("label_priorities", {:force=>:cascade}) - -> 0.0031s --- add_index("label_priorities", ["priority"], {:name=>"index_label_priorities_on_priority", :using=>:btree}) - -> 0.0028s --- add_index("label_priorities", ["project_id", "label_id"], {:name=>"index_label_priorities_on_project_id_and_label_id", :unique=>true, :using=>:btree}) - -> 0.0027s --- create_table("labels", {:force=>:cascade}) - -> 0.0046s --- add_index("labels", ["group_id", "project_id", "title"], {:name=>"index_labels_on_group_id_and_project_id_and_title", :unique=>true, :using=>:btree}) - -> 0.0028s --- add_index("labels", ["project_id"], {:name=>"index_labels_on_project_id", :using=>:btree}) - -> 0.0032s --- add_index("labels", ["template"], {:name=>"index_labels_on_template", :where=>"template", :using=>:btree}) - -> 0.0027s --- add_index("labels", ["title"], {:name=>"index_labels_on_title", :using=>:btree}) - -> 0.0030s --- add_index("labels", ["type", "project_id"], {:name=>"index_labels_on_type_and_project_id", :using=>:btree}) - -> 0.0028s --- create_table("lfs_objects", {:force=>:cascade}) - -> 
0.0040s --- add_index("lfs_objects", ["oid"], {:name=>"index_lfs_objects_on_oid", :unique=>true, :using=>:btree}) - -> 0.0032s --- create_table("lfs_objects_projects", {:force=>:cascade}) - -> 0.0035s --- add_index("lfs_objects_projects", ["project_id"], {:name=>"index_lfs_objects_projects_on_project_id", :using=>:btree}) - -> 0.0025s --- create_table("lists", {:force=>:cascade}) - -> 0.0033s --- add_index("lists", ["board_id", "label_id"], {:name=>"index_lists_on_board_id_and_label_id", :unique=>true, :using=>:btree}) - -> 0.0026s --- add_index("lists", ["label_id"], {:name=>"index_lists_on_label_id", :using=>:btree}) - -> 0.0026s --- create_table("members", {:force=>:cascade}) - -> 0.0046s --- add_index("members", ["access_level"], {:name=>"index_members_on_access_level", :using=>:btree}) - -> 0.0028s --- add_index("members", ["invite_token"], {:name=>"index_members_on_invite_token", :unique=>true, :using=>:btree}) - -> 0.0027s --- add_index("members", ["requested_at"], {:name=>"index_members_on_requested_at", :using=>:btree}) - -> 0.0025s --- add_index("members", ["source_id", "source_type"], {:name=>"index_members_on_source_id_and_source_type", :using=>:btree}) - -> 0.0027s --- add_index("members", ["user_id"], {:name=>"index_members_on_user_id", :using=>:btree}) - -> 0.0026s --- create_table("merge_request_diff_commits", {:id=>false, :force=>:cascade}) - -> 0.0027s --- add_index("merge_request_diff_commits", ["merge_request_diff_id", "relative_order"], {:name=>"index_merge_request_diff_commits_on_mr_diff_id_and_order", :unique=>true, :using=>:btree}) - -> 0.0032s --- add_index("merge_request_diff_commits", ["sha"], {:name=>"index_merge_request_diff_commits_on_sha", :using=>:btree}) - -> 0.0029s --- create_table("merge_request_diff_files", {:id=>false, :force=>:cascade}) - -> 0.0027s --- add_index("merge_request_diff_files", ["merge_request_diff_id", "relative_order"], {:name=>"index_merge_request_diff_files_on_mr_diff_id_and_order", :unique=>true, :using=>:btree}) - -> 0.0027s --- create_table("merge_request_diffs", {:force=>:cascade}) - -> 0.0042s --- add_index("merge_request_diffs", ["merge_request_id", "id"], {:name=>"index_merge_request_diffs_on_merge_request_id_and_id", :using=>:btree}) - -> 0.0030s --- create_table("merge_request_metrics", {:force=>:cascade}) - -> 0.0034s --- add_index("merge_request_metrics", ["first_deployed_to_production_at"], {:name=>"index_merge_request_metrics_on_first_deployed_to_production_at", :using=>:btree}) - -> 0.0028s --- add_index("merge_request_metrics", ["merge_request_id"], {:name=>"index_merge_request_metrics", :using=>:btree}) - -> 0.0025s --- add_index("merge_request_metrics", ["pipeline_id"], {:name=>"index_merge_request_metrics_on_pipeline_id", :using=>:btree}) - -> 0.0026s --- create_table("merge_requests", {:force=>:cascade}) - -> 0.0066s --- add_index("merge_requests", ["assignee_id"], {:name=>"index_merge_requests_on_assignee_id", :using=>:btree}) - -> 0.0029s --- add_index("merge_requests", ["author_id"], {:name=>"index_merge_requests_on_author_id", :using=>:btree}) - -> 0.0026s --- add_index("merge_requests", ["created_at"], {:name=>"index_merge_requests_on_created_at", :using=>:btree}) - -> 0.0026s --- add_index("merge_requests", ["description"], {:name=>"index_merge_requests_on_description_trigram", :using=>:gin, :opclasses=>{"description"=>"gin_trgm_ops"}}) - -> 0.0020s --- add_index("merge_requests", ["head_pipeline_id"], {:name=>"index_merge_requests_on_head_pipeline_id", :using=>:btree}) - -> 0.0027s --- 
add_index("merge_requests", ["latest_merge_request_diff_id"], {:name=>"index_merge_requests_on_latest_merge_request_diff_id", :using=>:btree}) - -> 0.0025s --- add_index("merge_requests", ["merge_user_id"], {:name=>"index_merge_requests_on_merge_user_id", :where=>"(merge_user_id IS NOT NULL)", :using=>:btree}) - -> 0.0029s --- add_index("merge_requests", ["milestone_id"], {:name=>"index_merge_requests_on_milestone_id", :using=>:btree}) - -> 0.0030s --- add_index("merge_requests", ["source_branch"], {:name=>"index_merge_requests_on_source_branch", :using=>:btree}) - -> 0.0026s --- add_index("merge_requests", ["source_project_id", "source_branch"], {:name=>"index_merge_requests_on_source_project_and_branch_state_opened", :where=>"((state)::text = 'opened'::text)", :using=>:btree}) - -> 0.0029s --- add_index("merge_requests", ["source_project_id", "source_branch"], {:name=>"index_merge_requests_on_source_project_id_and_source_branch", :using=>:btree}) - -> 0.0031s --- add_index("merge_requests", ["target_branch"], {:name=>"index_merge_requests_on_target_branch", :using=>:btree}) - -> 0.0028s --- add_index("merge_requests", ["target_project_id", "iid"], {:name=>"index_merge_requests_on_target_project_id_and_iid", :unique=>true, :using=>:btree}) - -> 0.0027s --- add_index("merge_requests", ["target_project_id", "merge_commit_sha", "id"], {:name=>"index_merge_requests_on_tp_id_and_merge_commit_sha_and_id", :using=>:btree}) - -> 0.0029s --- add_index("merge_requests", ["title"], {:name=>"index_merge_requests_on_title", :using=>:btree}) - -> 0.0026s --- add_index("merge_requests", ["title"], {:name=>"index_merge_requests_on_title_trigram", :using=>:gin, :opclasses=>{"title"=>"gin_trgm_ops"}}) - -> 0.0020s --- add_index("merge_requests", ["updated_by_id"], {:name=>"index_merge_requests_on_updated_by_id", :where=>"(updated_by_id IS NOT NULL)", :using=>:btree}) - -> 0.0029s --- create_table("merge_requests_closing_issues", {:force=>:cascade}) - -> 0.0031s --- add_index("merge_requests_closing_issues", ["issue_id"], {:name=>"index_merge_requests_closing_issues_on_issue_id", :using=>:btree}) - -> 0.0026s --- add_index("merge_requests_closing_issues", ["merge_request_id"], {:name=>"index_merge_requests_closing_issues_on_merge_request_id", :using=>:btree}) - -> 0.0028s --- create_table("milestones", {:force=>:cascade}) - -> 0.0044s --- add_index("milestones", ["description"], {:name=>"index_milestones_on_description_trigram", :using=>:gin, :opclasses=>{"description"=>"gin_trgm_ops"}}) - -> 0.0022s --- add_index("milestones", ["due_date"], {:name=>"index_milestones_on_due_date", :using=>:btree}) - -> 0.0033s --- add_index("milestones", ["group_id"], {:name=>"index_milestones_on_group_id", :using=>:btree}) - -> 0.0028s --- add_index("milestones", ["project_id", "iid"], {:name=>"index_milestones_on_project_id_and_iid", :unique=>true, :using=>:btree}) - -> 0.0028s --- add_index("milestones", ["title"], {:name=>"index_milestones_on_title", :using=>:btree}) - -> 0.0026s --- add_index("milestones", ["title"], {:name=>"index_milestones_on_title_trigram", :using=>:gin, :opclasses=>{"title"=>"gin_trgm_ops"}}) - -> 0.0021s --- create_table("namespaces", {:force=>:cascade}) - -> 0.0068s --- add_index("namespaces", ["created_at"], {:name=>"index_namespaces_on_created_at", :using=>:btree}) - -> 0.0030s --- add_index("namespaces", ["name", "parent_id"], {:name=>"index_namespaces_on_name_and_parent_id", :unique=>true, :using=>:btree}) - -> 0.0030s --- add_index("namespaces", ["name"], 
{:name=>"index_namespaces_on_name_trigram", :using=>:gin, :opclasses=>{"name"=>"gin_trgm_ops"}}) - -> 0.0020s --- add_index("namespaces", ["owner_id"], {:name=>"index_namespaces_on_owner_id", :using=>:btree}) - -> 0.0028s --- add_index("namespaces", ["parent_id", "id"], {:name=>"index_namespaces_on_parent_id_and_id", :unique=>true, :using=>:btree}) - -> 0.0032s --- add_index("namespaces", ["path"], {:name=>"index_namespaces_on_path", :using=>:btree}) - -> 0.0031s --- add_index("namespaces", ["path"], {:name=>"index_namespaces_on_path_trigram", :using=>:gin, :opclasses=>{"path"=>"gin_trgm_ops"}}) - -> 0.0019s --- add_index("namespaces", ["require_two_factor_authentication"], {:name=>"index_namespaces_on_require_two_factor_authentication", :using=>:btree}) - -> 0.0029s --- add_index("namespaces", ["type"], {:name=>"index_namespaces_on_type", :using=>:btree}) - -> 0.0032s --- create_table("notes", {:force=>:cascade}) - -> 0.0055s --- add_index("notes", ["author_id"], {:name=>"index_notes_on_author_id", :using=>:btree}) - -> 0.0029s --- add_index("notes", ["commit_id"], {:name=>"index_notes_on_commit_id", :using=>:btree}) - -> 0.0028s --- add_index("notes", ["created_at"], {:name=>"index_notes_on_created_at", :using=>:btree}) - -> 0.0029s --- add_index("notes", ["discussion_id"], {:name=>"index_notes_on_discussion_id", :using=>:btree}) - -> 0.0029s --- add_index("notes", ["line_code"], {:name=>"index_notes_on_line_code", :using=>:btree}) - -> 0.0029s --- add_index("notes", ["note"], {:name=>"index_notes_on_note_trigram", :using=>:gin, :opclasses=>{"note"=>"gin_trgm_ops"}}) - -> 0.0024s --- add_index("notes", ["noteable_id", "noteable_type"], {:name=>"index_notes_on_noteable_id_and_noteable_type", :using=>:btree}) - -> 0.0029s --- add_index("notes", ["noteable_type"], {:name=>"index_notes_on_noteable_type", :using=>:btree}) - -> 0.0030s --- add_index("notes", ["project_id", "noteable_type"], {:name=>"index_notes_on_project_id_and_noteable_type", :using=>:btree}) - -> 0.0027s --- add_index("notes", ["updated_at"], {:name=>"index_notes_on_updated_at", :using=>:btree}) - -> 0.0026s --- create_table("notification_settings", {:force=>:cascade}) - -> 0.0053s --- add_index("notification_settings", ["source_id", "source_type"], {:name=>"index_notification_settings_on_source_id_and_source_type", :using=>:btree}) - -> 0.0028s --- add_index("notification_settings", ["user_id", "source_id", "source_type"], {:name=>"index_notifications_on_user_id_and_source_id_and_source_type", :unique=>true, :using=>:btree}) - -> 0.0030s --- add_index("notification_settings", ["user_id"], {:name=>"index_notification_settings_on_user_id", :using=>:btree}) - -> 0.0031s --- create_table("oauth_access_grants", {:force=>:cascade}) - -> 0.0042s --- add_index("oauth_access_grants", ["token"], {:name=>"index_oauth_access_grants_on_token", :unique=>true, :using=>:btree}) - -> 0.0031s --- create_table("oauth_access_tokens", {:force=>:cascade}) - -> 0.0051s --- add_index("oauth_access_tokens", ["refresh_token"], {:name=>"index_oauth_access_tokens_on_refresh_token", :unique=>true, :using=>:btree}) - -> 0.0030s --- add_index("oauth_access_tokens", ["resource_owner_id"], {:name=>"index_oauth_access_tokens_on_resource_owner_id", :using=>:btree}) - -> 0.0025s --- add_index("oauth_access_tokens", ["token"], {:name=>"index_oauth_access_tokens_on_token", :unique=>true, :using=>:btree}) - -> 0.0026s --- create_table("oauth_applications", {:force=>:cascade}) - -> 0.0049s --- add_index("oauth_applications", ["owner_id", "owner_type"], 
{:name=>"index_oauth_applications_on_owner_id_and_owner_type", :using=>:btree}) - -> 0.0030s --- add_index("oauth_applications", ["uid"], {:name=>"index_oauth_applications_on_uid", :unique=>true, :using=>:btree}) - -> 0.0032s --- create_table("oauth_openid_requests", {:force=>:cascade}) - -> 0.0048s --- create_table("pages_domains", {:force=>:cascade}) - -> 0.0052s --- add_index("pages_domains", ["domain"], {:name=>"index_pages_domains_on_domain", :unique=>true, :using=>:btree}) - -> 0.0027s --- add_index("pages_domains", ["project_id"], {:name=>"index_pages_domains_on_project_id", :using=>:btree}) - -> 0.0030s --- create_table("personal_access_tokens", {:force=>:cascade}) - -> 0.0056s --- add_index("personal_access_tokens", ["token"], {:name=>"index_personal_access_tokens_on_token", :unique=>true, :using=>:btree}) - -> 0.0032s --- add_index("personal_access_tokens", ["user_id"], {:name=>"index_personal_access_tokens_on_user_id", :using=>:btree}) - -> 0.0028s --- create_table("project_authorizations", {:id=>false, :force=>:cascade}) - -> 0.0018s --- add_index("project_authorizations", ["project_id"], {:name=>"index_project_authorizations_on_project_id", :using=>:btree}) - -> 0.0033s --- add_index("project_authorizations", ["user_id", "project_id", "access_level"], {:name=>"index_project_authorizations_on_user_id_project_id_access_level", :unique=>true, :using=>:btree}) - -> 0.0029s --- create_table("project_auto_devops", {:force=>:cascade}) - -> 0.0043s --- add_index("project_auto_devops", ["project_id"], {:name=>"index_project_auto_devops_on_project_id", :unique=>true, :using=>:btree}) - -> 0.0029s --- create_table("project_custom_attributes", {:force=>:cascade}) - -> 0.0047s --- add_index("project_custom_attributes", ["key", "value"], {:name=>"index_project_custom_attributes_on_key_and_value", :using=>:btree}) - -> 0.0030s --- add_index("project_custom_attributes", ["project_id", "key"], {:name=>"index_project_custom_attributes_on_project_id_and_key", :unique=>true, :using=>:btree}) - -> 0.0028s --- create_table("project_features", {:force=>:cascade}) - -> 0.0038s --- add_index("project_features", ["project_id"], {:name=>"index_project_features_on_project_id", :using=>:btree}) - -> 0.0029s --- create_table("project_group_links", {:force=>:cascade}) - -> 0.0036s --- add_index("project_group_links", ["group_id"], {:name=>"index_project_group_links_on_group_id", :using=>:btree}) - -> 0.0028s --- add_index("project_group_links", ["project_id"], {:name=>"index_project_group_links_on_project_id", :using=>:btree}) - -> 0.0030s --- create_table("project_import_data", {:force=>:cascade}) - -> 0.0049s --- add_index("project_import_data", ["project_id"], {:name=>"index_project_import_data_on_project_id", :using=>:btree}) - -> 0.0027s --- create_table("project_statistics", {:force=>:cascade}) - -> 0.0046s --- add_index("project_statistics", ["namespace_id"], {:name=>"index_project_statistics_on_namespace_id", :using=>:btree}) - -> 0.0027s --- add_index("project_statistics", ["project_id"], {:name=>"index_project_statistics_on_project_id", :unique=>true, :using=>:btree}) - -> 0.0029s --- create_table("projects", {:force=>:cascade}) - -> 0.0090s --- add_index("projects", ["ci_id"], {:name=>"index_projects_on_ci_id", :using=>:btree}) - -> 0.0033s --- add_index("projects", ["created_at"], {:name=>"index_projects_on_created_at", :using=>:btree}) - -> 0.0030s --- add_index("projects", ["creator_id"], {:name=>"index_projects_on_creator_id", :using=>:btree}) - -> 0.0028s --- add_index("projects", 
["description"], {:name=>"index_projects_on_description_trigram", :using=>:gin, :opclasses=>{"description"=>"gin_trgm_ops"}}) - -> 0.0022s --- add_index("projects", ["last_activity_at"], {:name=>"index_projects_on_last_activity_at", :using=>:btree}) - -> 0.0032s --- add_index("projects", ["last_repository_check_failed"], {:name=>"index_projects_on_last_repository_check_failed", :using=>:btree}) - -> 0.0030s --- add_index("projects", ["last_repository_updated_at"], {:name=>"index_projects_on_last_repository_updated_at", :using=>:btree}) - -> 0.0031s --- add_index("projects", ["name"], {:name=>"index_projects_on_name_trigram", :using=>:gin, :opclasses=>{"name"=>"gin_trgm_ops"}}) - -> 0.0022s --- add_index("projects", ["namespace_id"], {:name=>"index_projects_on_namespace_id", :using=>:btree}) - -> 0.0028s --- add_index("projects", ["path"], {:name=>"index_projects_on_path", :using=>:btree}) - -> 0.0028s --- add_index("projects", ["path"], {:name=>"index_projects_on_path_trigram", :using=>:gin, :opclasses=>{"path"=>"gin_trgm_ops"}}) - -> 0.0023s --- add_index("projects", ["pending_delete"], {:name=>"index_projects_on_pending_delete", :using=>:btree}) - -> 0.0029s --- add_index("projects", ["repository_storage"], {:name=>"index_projects_on_repository_storage", :using=>:btree}) - -> 0.0026s --- add_index("projects", ["runners_token"], {:name=>"index_projects_on_runners_token", :using=>:btree}) - -> 0.0034s --- add_index("projects", ["star_count"], {:name=>"index_projects_on_star_count", :using=>:btree}) - -> 0.0028s --- add_index("projects", ["visibility_level"], {:name=>"index_projects_on_visibility_level", :using=>:btree}) - -> 0.0027s --- create_table("protected_branch_merge_access_levels", {:force=>:cascade}) - -> 0.0042s --- add_index("protected_branch_merge_access_levels", ["protected_branch_id"], {:name=>"index_protected_branch_merge_access", :using=>:btree}) - -> 0.0029s --- create_table("protected_branch_push_access_levels", {:force=>:cascade}) - -> 0.0037s --- add_index("protected_branch_push_access_levels", ["protected_branch_id"], {:name=>"index_protected_branch_push_access", :using=>:btree}) - -> 0.0030s --- create_table("protected_branches", {:force=>:cascade}) - -> 0.0048s --- add_index("protected_branches", ["project_id"], {:name=>"index_protected_branches_on_project_id", :using=>:btree}) - -> 0.0030s --- create_table("protected_tag_create_access_levels", {:force=>:cascade}) - -> 0.0037s --- add_index("protected_tag_create_access_levels", ["protected_tag_id"], {:name=>"index_protected_tag_create_access", :using=>:btree}) - -> 0.0029s --- add_index("protected_tag_create_access_levels", ["user_id"], {:name=>"index_protected_tag_create_access_levels_on_user_id", :using=>:btree}) - -> 0.0029s --- create_table("protected_tags", {:force=>:cascade}) - -> 0.0051s --- add_index("protected_tags", ["project_id"], {:name=>"index_protected_tags_on_project_id", :using=>:btree}) - -> 0.0034s --- create_table("push_event_payloads", {:id=>false, :force=>:cascade}) - -> 0.0030s --- add_index("push_event_payloads", ["event_id"], {:name=>"index_push_event_payloads_on_event_id", :unique=>true, :using=>:btree}) - -> 0.0029s --- create_table("redirect_routes", {:force=>:cascade}) - -> 0.0049s --- add_index("redirect_routes", ["path"], {:name=>"index_redirect_routes_on_path", :unique=>true, :using=>:btree}) - -> 0.0031s --- add_index("redirect_routes", ["source_type", "source_id"], {:name=>"index_redirect_routes_on_source_type_and_source_id", :using=>:btree}) - -> 0.0034s --- create_table("releases", 
{:force=>:cascade}) - -> 0.0043s --- add_index("releases", ["project_id", "tag"], {:name=>"index_releases_on_project_id_and_tag", :using=>:btree}) - -> 0.0032s --- add_index("releases", ["project_id"], {:name=>"index_releases_on_project_id", :using=>:btree}) - -> 0.0030s --- create_table("routes", {:force=>:cascade}) - -> 0.0055s --- add_index("routes", ["path"], {:name=>"index_routes_on_path", :unique=>true, :using=>:btree}) - -> 0.0028s --- add_index("routes", ["path"], {:name=>"index_routes_on_path_text_pattern_ops", :using=>:btree, :opclasses=>{"path"=>"varchar_pattern_ops"}}) - -> 0.0026s --- add_index("routes", ["source_type", "source_id"], {:name=>"index_routes_on_source_type_and_source_id", :unique=>true, :using=>:btree}) - -> 0.0029s --- create_table("sent_notifications", {:force=>:cascade}) - -> 0.0048s --- add_index("sent_notifications", ["reply_key"], {:name=>"index_sent_notifications_on_reply_key", :unique=>true, :using=>:btree}) - -> 0.0029s --- create_table("services", {:force=>:cascade}) - -> 0.0091s --- add_index("services", ["project_id"], {:name=>"index_services_on_project_id", :using=>:btree}) - -> 0.0028s --- add_index("services", ["template"], {:name=>"index_services_on_template", :using=>:btree}) - -> 0.0031s --- create_table("snippets", {:force=>:cascade}) - -> 0.0050s --- add_index("snippets", ["author_id"], {:name=>"index_snippets_on_author_id", :using=>:btree}) - -> 0.0030s --- add_index("snippets", ["file_name"], {:name=>"index_snippets_on_file_name_trigram", :using=>:gin, :opclasses=>{"file_name"=>"gin_trgm_ops"}}) - -> 0.0020s --- add_index("snippets", ["project_id"], {:name=>"index_snippets_on_project_id", :using=>:btree}) - -> 0.0028s --- add_index("snippets", ["title"], {:name=>"index_snippets_on_title_trigram", :using=>:gin, :opclasses=>{"title"=>"gin_trgm_ops"}}) - -> 0.0020s --- add_index("snippets", ["updated_at"], {:name=>"index_snippets_on_updated_at", :using=>:btree}) - -> 0.0026s --- add_index("snippets", ["visibility_level"], {:name=>"index_snippets_on_visibility_level", :using=>:btree}) - -> 0.0026s --- create_table("spam_logs", {:force=>:cascade}) - -> 0.0048s --- create_table("subscriptions", {:force=>:cascade}) - -> 0.0041s --- add_index("subscriptions", ["subscribable_id", "subscribable_type", "user_id", "project_id"], {:name=>"index_subscriptions_on_subscribable_and_user_id_and_project_id", :unique=>true, :using=>:btree}) - -> 0.0030s --- create_table("system_note_metadata", {:force=>:cascade}) - -> 0.0040s --- add_index("system_note_metadata", ["note_id"], {:name=>"index_system_note_metadata_on_note_id", :unique=>true, :using=>:btree}) - -> 0.0029s --- create_table("taggings", {:force=>:cascade}) - -> 0.0047s --- add_index("taggings", ["tag_id", "taggable_id", "taggable_type", "context", "tagger_id", "tagger_type"], {:name=>"taggings_idx", :unique=>true, :using=>:btree}) - -> 0.0030s --- add_index("taggings", ["taggable_id", "taggable_type", "context"], {:name=>"index_taggings_on_taggable_id_and_taggable_type_and_context", :using=>:btree}) - -> 0.0025s --- create_table("tags", {:force=>:cascade}) - -> 0.0044s --- add_index("tags", ["name"], {:name=>"index_tags_on_name", :unique=>true, :using=>:btree}) - -> 0.0026s --- create_table("timelogs", {:force=>:cascade}) - -> 0.0033s --- add_index("timelogs", ["issue_id"], {:name=>"index_timelogs_on_issue_id", :using=>:btree}) - -> 0.0027s --- add_index("timelogs", ["merge_request_id"], {:name=>"index_timelogs_on_merge_request_id", :using=>:btree}) - -> 0.0033s --- add_index("timelogs", ["user_id"], 
{:name=>"index_timelogs_on_user_id", :using=>:btree}) - -> 0.0028s --- create_table("todos", {:force=>:cascade}) - -> 0.0043s --- add_index("todos", ["author_id"], {:name=>"index_todos_on_author_id", :using=>:btree}) - -> 0.0027s --- add_index("todos", ["commit_id"], {:name=>"index_todos_on_commit_id", :using=>:btree}) - -> 0.0028s --- add_index("todos", ["note_id"], {:name=>"index_todos_on_note_id", :using=>:btree}) - -> 0.0028s --- add_index("todos", ["project_id"], {:name=>"index_todos_on_project_id", :using=>:btree}) - -> 0.0027s --- add_index("todos", ["target_type", "target_id"], {:name=>"index_todos_on_target_type_and_target_id", :using=>:btree}) - -> 0.0028s --- add_index("todos", ["user_id"], {:name=>"index_todos_on_user_id", :using=>:btree}) - -> 0.0026s --- create_table("trending_projects", {:force=>:cascade}) - -> 0.0030s --- add_index("trending_projects", ["project_id"], {:name=>"index_trending_projects_on_project_id", :using=>:btree}) - -> 0.0027s --- create_table("u2f_registrations", {:force=>:cascade}) - -> 0.0048s --- add_index("u2f_registrations", ["key_handle"], {:name=>"index_u2f_registrations_on_key_handle", :using=>:btree}) - -> 0.0029s --- add_index("u2f_registrations", ["user_id"], {:name=>"index_u2f_registrations_on_user_id", :using=>:btree}) - -> 0.0028s --- create_table("uploads", {:force=>:cascade}) - -> 0.0044s --- add_index("uploads", ["checksum"], {:name=>"index_uploads_on_checksum", :using=>:btree}) - -> 0.0028s --- add_index("uploads", ["model_id", "model_type"], {:name=>"index_uploads_on_model_id_and_model_type", :using=>:btree}) - -> 0.0027s --- add_index("uploads", ["path"], {:name=>"index_uploads_on_path", :using=>:btree}) - -> 0.0028s --- create_table("user_agent_details", {:force=>:cascade}) - -> 0.0051s --- add_index("user_agent_details", ["subject_id", "subject_type"], {:name=>"index_user_agent_details_on_subject_id_and_subject_type", :using=>:btree}) - -> 0.0028s --- create_table("user_custom_attributes", {:force=>:cascade}) - -> 0.0044s --- add_index("user_custom_attributes", ["key", "value"], {:name=>"index_user_custom_attributes_on_key_and_value", :using=>:btree}) - -> 0.0027s --- add_index("user_custom_attributes", ["user_id", "key"], {:name=>"index_user_custom_attributes_on_user_id_and_key", :unique=>true, :using=>:btree}) - -> 0.0026s --- create_table("user_synced_attributes_metadata", {:force=>:cascade}) - -> 0.0056s --- add_index("user_synced_attributes_metadata", ["user_id"], {:name=>"index_user_synced_attributes_metadata_on_user_id", :unique=>true, :using=>:btree}) - -> 0.0027s --- create_table("users", {:force=>:cascade}) - -> 0.0134s --- add_index("users", ["admin"], {:name=>"index_users_on_admin", :using=>:btree}) - -> 0.0030s --- add_index("users", ["confirmation_token"], {:name=>"index_users_on_confirmation_token", :unique=>true, :using=>:btree}) - -> 0.0029s --- add_index("users", ["created_at"], {:name=>"index_users_on_created_at", :using=>:btree}) - -> 0.0034s --- add_index("users", ["email"], {:name=>"index_users_on_email", :unique=>true, :using=>:btree}) - -> 0.0030s --- add_index("users", ["email"], {:name=>"index_users_on_email_trigram", :using=>:gin, :opclasses=>{"email"=>"gin_trgm_ops"}}) - -> 0.0431s --- add_index("users", ["ghost"], {:name=>"index_users_on_ghost", :using=>:btree}) - -> 0.0051s --- add_index("users", ["incoming_email_token"], {:name=>"index_users_on_incoming_email_token", :using=>:btree}) - -> 0.0044s --- add_index("users", ["name"], {:name=>"index_users_on_name", :using=>:btree}) - -> 0.0044s --- 
add_index("users", ["name"], {:name=>"index_users_on_name_trigram", :using=>:gin, :opclasses=>{"name"=>"gin_trgm_ops"}}) - -> 0.0034s --- add_index("users", ["reset_password_token"], {:name=>"index_users_on_reset_password_token", :unique=>true, :using=>:btree}) - -> 0.0044s --- add_index("users", ["rss_token"], {:name=>"index_users_on_rss_token", :using=>:btree}) - -> 0.0046s --- add_index("users", ["state"], {:name=>"index_users_on_state", :using=>:btree}) - -> 0.0040s --- add_index("users", ["username"], {:name=>"index_users_on_username", :using=>:btree}) - -> 0.0046s --- add_index("users", ["username"], {:name=>"index_users_on_username_trigram", :using=>:gin, :opclasses=>{"username"=>"gin_trgm_ops"}}) - -> 0.0044s --- create_table("users_star_projects", {:force=>:cascade}) - -> 0.0055s --- add_index("users_star_projects", ["project_id"], {:name=>"index_users_star_projects_on_project_id", :using=>:btree}) - -> 0.0037s --- add_index("users_star_projects", ["user_id", "project_id"], {:name=>"index_users_star_projects_on_user_id_and_project_id", :unique=>true, :using=>:btree}) - -> 0.0044s --- create_table("web_hook_logs", {:force=>:cascade}) - -> 0.0060s --- add_index("web_hook_logs", ["web_hook_id"], {:name=>"index_web_hook_logs_on_web_hook_id", :using=>:btree}) - -> 0.0034s --- create_table("web_hooks", {:force=>:cascade}) - -> 0.0120s --- add_index("web_hooks", ["project_id"], {:name=>"index_web_hooks_on_project_id", :using=>:btree}) - -> 0.0038s --- add_index("web_hooks", ["type"], {:name=>"index_web_hooks_on_type", :using=>:btree}) - -> 0.0036s --- add_foreign_key("boards", "projects", {:name=>"fk_f15266b5f9", :on_delete=>:cascade}) - -> 0.0030s --- add_foreign_key("chat_teams", "namespaces", {:on_delete=>:cascade}) - -> 0.0021s --- add_foreign_key("ci_build_trace_section_names", "projects", {:on_delete=>:cascade}) - -> 0.0022s --- add_foreign_key("ci_build_trace_sections", "ci_build_trace_section_names", {:column=>"section_name_id", :name=>"fk_264e112c66", :on_delete=>:cascade}) - -> 0.0018s --- add_foreign_key("ci_build_trace_sections", "ci_builds", {:column=>"build_id", :name=>"fk_4ebe41f502", :on_delete=>:cascade}) - -> 0.0024s --- add_foreign_key("ci_build_trace_sections", "projects", {:on_delete=>:cascade}) - -> 0.0019s --- add_foreign_key("ci_builds", "ci_pipelines", {:column=>"auto_canceled_by_id", :name=>"fk_a2141b1522", :on_delete=>:nullify}) - -> 0.0023s --- add_foreign_key("ci_builds", "ci_stages", {:column=>"stage_id", :name=>"fk_3a9eaa254d", :on_delete=>:cascade}) - -> 0.0020s --- add_foreign_key("ci_builds", "projects", {:name=>"fk_befce0568a", :on_delete=>:cascade}) - -> 0.0024s --- add_foreign_key("ci_group_variables", "namespaces", {:column=>"group_id", :name=>"fk_33ae4d58d8", :on_delete=>:cascade}) - -> 0.0024s --- add_foreign_key("ci_job_artifacts", "ci_builds", {:column=>"job_id", :on_delete=>:cascade}) - -> 0.0019s --- add_foreign_key("ci_job_artifacts", "projects", {:on_delete=>:cascade}) - -> 0.0020s --- add_foreign_key("ci_pipeline_schedule_variables", "ci_pipeline_schedules", {:column=>"pipeline_schedule_id", :name=>"fk_41c35fda51", :on_delete=>:cascade}) - -> 0.0027s --- add_foreign_key("ci_pipeline_schedules", "projects", {:name=>"fk_8ead60fcc4", :on_delete=>:cascade}) - -> 0.0022s --- add_foreign_key("ci_pipeline_schedules", "users", {:column=>"owner_id", :name=>"fk_9ea99f58d2", :on_delete=>:nullify}) - -> 0.0025s --- add_foreign_key("ci_pipeline_variables", "ci_pipelines", {:column=>"pipeline_id", :name=>"fk_f29c5f4380", :on_delete=>:cascade}) - -> 
0.0018s --- add_foreign_key("ci_pipelines", "ci_pipeline_schedules", {:column=>"pipeline_schedule_id", :name=>"fk_3d34ab2e06", :on_delete=>:nullify}) - -> 0.0019s --- add_foreign_key("ci_pipelines", "ci_pipelines", {:column=>"auto_canceled_by_id", :name=>"fk_262d4c2d19", :on_delete=>:nullify}) - -> 0.0029s --- add_foreign_key("ci_pipelines", "projects", {:name=>"fk_86635dbd80", :on_delete=>:cascade}) - -> 0.0023s --- add_foreign_key("ci_runner_projects", "projects", {:name=>"fk_4478a6f1e4", :on_delete=>:cascade}) - -> 0.0036s --- add_foreign_key("ci_stages", "ci_pipelines", {:column=>"pipeline_id", :name=>"fk_fb57e6cc56", :on_delete=>:cascade}) - -> 0.0017s --- add_foreign_key("ci_stages", "projects", {:name=>"fk_2360681d1d", :on_delete=>:cascade}) - -> 0.0020s --- add_foreign_key("ci_trigger_requests", "ci_triggers", {:column=>"trigger_id", :name=>"fk_b8ec8b7245", :on_delete=>:cascade}) - -> 0.0016s --- add_foreign_key("ci_triggers", "projects", {:name=>"fk_e3e63f966e", :on_delete=>:cascade}) - -> 0.0021s --- add_foreign_key("ci_triggers", "users", {:column=>"owner_id", :name=>"fk_e8e10d1964", :on_delete=>:cascade}) - -> 0.0019s --- add_foreign_key("ci_variables", "projects", {:name=>"fk_ada5eb64b3", :on_delete=>:cascade}) - -> 0.0021s --- add_foreign_key("cluster_platforms_kubernetes", "clusters", {:on_delete=>:cascade}) - -> 0.0019s --- add_foreign_key("cluster_projects", "clusters", {:on_delete=>:cascade}) - -> 0.0018s --- add_foreign_key("cluster_projects", "projects", {:on_delete=>:cascade}) - -> 0.0020s --- add_foreign_key("cluster_providers_gcp", "clusters", {:on_delete=>:cascade}) - -> 0.0017s --- add_foreign_key("clusters", "users", {:on_delete=>:nullify}) - -> 0.0018s --- add_foreign_key("clusters_applications_helm", "clusters", {:on_delete=>:cascade}) - -> 0.0019s --- add_foreign_key("container_repositories", "projects") - -> 0.0020s --- add_foreign_key("deploy_keys_projects", "projects", {:name=>"fk_58a901ca7e", :on_delete=>:cascade}) - -> 0.0019s --- add_foreign_key("deployments", "projects", {:name=>"fk_b9a3851b82", :on_delete=>:cascade}) - -> 0.0021s --- add_foreign_key("environments", "projects", {:name=>"fk_d1c8c1da6a", :on_delete=>:cascade}) - -> 0.0019s --- add_foreign_key("events", "projects", {:on_delete=>:cascade}) - -> 0.0020s --- add_foreign_key("events", "users", {:column=>"author_id", :name=>"fk_edfd187b6f", :on_delete=>:cascade}) - -> 0.0020s --- add_foreign_key("fork_network_members", "fork_networks", {:on_delete=>:cascade}) - -> 0.0016s --- add_foreign_key("fork_network_members", "projects", {:column=>"forked_from_project_id", :name=>"fk_b01280dae4", :on_delete=>:nullify}) - -> 0.0019s --- add_foreign_key("fork_network_members", "projects", {:on_delete=>:cascade}) - -> 0.0018s --- add_foreign_key("fork_networks", "projects", {:column=>"root_project_id", :name=>"fk_e7b436b2b5", :on_delete=>:nullify}) - -> 0.0018s --- add_foreign_key("forked_project_links", "projects", {:column=>"forked_to_project_id", :name=>"fk_434510edb0", :on_delete=>:cascade}) - -> 0.0018s --- add_foreign_key("gcp_clusters", "projects", {:on_delete=>:cascade}) - -> 0.0029s --- add_foreign_key("gcp_clusters", "services", {:on_delete=>:nullify}) - -> 0.0022s --- add_foreign_key("gcp_clusters", "users", {:on_delete=>:nullify}) - -> 0.0019s --- add_foreign_key("gpg_key_subkeys", "gpg_keys", {:on_delete=>:cascade}) - -> 0.0017s --- add_foreign_key("gpg_keys", "users", {:on_delete=>:cascade}) - -> 0.0019s --- add_foreign_key("gpg_signatures", "gpg_key_subkeys", {:on_delete=>:nullify}) - -> 
0.0016s --- add_foreign_key("gpg_signatures", "gpg_keys", {:on_delete=>:nullify}) - -> 0.0016s --- add_foreign_key("gpg_signatures", "projects", {:on_delete=>:cascade}) - -> 0.0016s --- add_foreign_key("group_custom_attributes", "namespaces", {:column=>"group_id", :on_delete=>:cascade}) - -> 0.0014s --- add_foreign_key("issue_assignees", "issues", {:name=>"fk_b7d881734a", :on_delete=>:cascade}) - -> 0.0019s --- add_foreign_key("issue_assignees", "users", {:name=>"fk_5e0c8d9154", :on_delete=>:cascade}) - -> 0.0015s --- add_foreign_key("issue_metrics", "issues", {:on_delete=>:cascade}) - -> 0.0016s --- add_foreign_key("issues", "issues", {:column=>"moved_to_id", :name=>"fk_a194299be1", :on_delete=>:nullify}) - -> 0.0014s --- add_foreign_key("issues", "milestones", {:name=>"fk_96b1dd429c", :on_delete=>:nullify}) - -> 0.0016s --- add_foreign_key("issues", "projects", {:name=>"fk_899c8f3231", :on_delete=>:cascade}) - -> 0.0016s --- add_foreign_key("issues", "users", {:column=>"author_id", :name=>"fk_05f1e72feb", :on_delete=>:nullify}) - -> 0.0015s --- add_foreign_key("issues", "users", {:column=>"updated_by_id", :name=>"fk_ffed080f01", :on_delete=>:nullify}) - -> 0.0017s --- add_foreign_key("label_priorities", "labels", {:on_delete=>:cascade}) - -> 0.0015s --- add_foreign_key("label_priorities", "projects", {:on_delete=>:cascade}) - -> 0.0015s --- add_foreign_key("labels", "namespaces", {:column=>"group_id", :on_delete=>:cascade}) - -> 0.0015s --- add_foreign_key("labels", "projects", {:name=>"fk_7de4989a69", :on_delete=>:cascade}) - -> 0.0016s --- add_foreign_key("lists", "boards", {:name=>"fk_0d3f677137", :on_delete=>:cascade}) - -> 0.0015s --- add_foreign_key("lists", "labels", {:name=>"fk_7a5553d60f", :on_delete=>:cascade}) - -> 0.0014s --- add_foreign_key("members", "users", {:name=>"fk_2e88fb7ce9", :on_delete=>:cascade}) - -> 0.0016s --- add_foreign_key("merge_request_diff_commits", "merge_request_diffs", {:on_delete=>:cascade}) - -> 0.0014s --- add_foreign_key("merge_request_diff_files", "merge_request_diffs", {:on_delete=>:cascade}) - -> 0.0014s --- add_foreign_key("merge_request_diffs", "merge_requests", {:name=>"fk_8483f3258f", :on_delete=>:cascade}) - -> 0.0019s --- add_foreign_key("merge_request_metrics", "ci_pipelines", {:column=>"pipeline_id", :on_delete=>:cascade}) - -> 0.0017s --- add_foreign_key("merge_request_metrics", "merge_requests", {:on_delete=>:cascade}) - -> 0.0016s --- add_foreign_key("merge_request_metrics", "users", {:column=>"latest_closed_by_id", :name=>"fk_ae440388cc", :on_delete=>:nullify}) - -> 0.0015s --- add_foreign_key("merge_request_metrics", "users", {:column=>"merged_by_id", :name=>"fk_7f28d925f3", :on_delete=>:nullify}) - -> 0.0015s --- add_foreign_key("merge_requests", "ci_pipelines", {:column=>"head_pipeline_id", :name=>"fk_fd82eae0b9", :on_delete=>:nullify}) - -> 0.0014s --- add_foreign_key("merge_requests", "merge_request_diffs", {:column=>"latest_merge_request_diff_id", :name=>"fk_06067f5644", :on_delete=>:nullify}) - -> 0.0014s --- add_foreign_key("merge_requests", "milestones", {:name=>"fk_6a5165a692", :on_delete=>:nullify}) - -> 0.0015s --- add_foreign_key("merge_requests", "projects", {:column=>"source_project_id", :name=>"fk_3308fe130c", :on_delete=>:nullify}) - -> 0.0017s --- add_foreign_key("merge_requests", "projects", {:column=>"target_project_id", :name=>"fk_a6963e8447", :on_delete=>:cascade}) - -> 0.0016s --- add_foreign_key("merge_requests", "users", {:column=>"assignee_id", :name=>"fk_6149611a04", :on_delete=>:nullify}) - -> 0.0016s --- 
add_foreign_key("merge_requests", "users", {:column=>"author_id", :name=>"fk_e719a85f8a", :on_delete=>:nullify}) - -> 0.0017s --- add_foreign_key("merge_requests", "users", {:column=>"merge_user_id", :name=>"fk_ad525e1f87", :on_delete=>:nullify}) - -> 0.0018s --- add_foreign_key("merge_requests", "users", {:column=>"updated_by_id", :name=>"fk_641731faff", :on_delete=>:nullify}) - -> 0.0017s --- add_foreign_key("merge_requests_closing_issues", "issues", {:on_delete=>:cascade}) - -> 0.0016s --- add_foreign_key("merge_requests_closing_issues", "merge_requests", {:on_delete=>:cascade}) - -> 0.0014s --- add_foreign_key("milestones", "namespaces", {:column=>"group_id", :name=>"fk_95650a40d4", :on_delete=>:cascade}) - -> 0.0014s --- add_foreign_key("milestones", "projects", {:name=>"fk_9bd0a0c791", :on_delete=>:cascade}) - -> 0.0017s --- add_foreign_key("notes", "projects", {:name=>"fk_99e097b079", :on_delete=>:cascade}) - -> 0.0019s --- add_foreign_key("oauth_openid_requests", "oauth_access_grants", {:column=>"access_grant_id", :name=>"fk_oauth_openid_requests_oauth_access_grants_access_grant_id"}) - -> 0.0014s --- add_foreign_key("pages_domains", "projects", {:name=>"fk_ea2f6dfc6f", :on_delete=>:cascade}) - -> 0.0021s --- add_foreign_key("personal_access_tokens", "users") - -> 0.0016s --- add_foreign_key("project_authorizations", "projects", {:on_delete=>:cascade}) - -> 0.0016s --- add_foreign_key("project_authorizations", "users", {:on_delete=>:cascade}) - -> 0.0016s --- add_foreign_key("project_auto_devops", "projects", {:on_delete=>:cascade}) - -> 0.0026s --- add_foreign_key("project_custom_attributes", "projects", {:on_delete=>:cascade}) - -> 0.0016s --- add_foreign_key("project_features", "projects", {:name=>"fk_18513d9b92", :on_delete=>:cascade}) - -> 0.0020s --- add_foreign_key("project_group_links", "projects", {:name=>"fk_daa8cee94c", :on_delete=>:cascade}) - -> 0.0016s --- add_foreign_key("project_import_data", "projects", {:name=>"fk_ffb9ee3a10", :on_delete=>:cascade}) - -> 0.0016s --- add_foreign_key("project_statistics", "projects", {:on_delete=>:cascade}) - -> 0.0021s --- add_foreign_key("protected_branch_merge_access_levels", "protected_branches", {:name=>"fk_8a3072ccb3", :on_delete=>:cascade}) - -> 0.0014s --- add_foreign_key("protected_branch_push_access_levels", "protected_branches", {:name=>"fk_9ffc86a3d9", :on_delete=>:cascade}) - -> 0.0014s --- add_foreign_key("protected_branches", "projects", {:name=>"fk_7a9c6d93e7", :on_delete=>:cascade}) - -> 0.0016s --- add_foreign_key("protected_tag_create_access_levels", "namespaces", {:column=>"group_id"}) - -> 0.0016s --- add_foreign_key("protected_tag_create_access_levels", "protected_tags", {:name=>"fk_f7dfda8c51", :on_delete=>:cascade}) - -> 0.0013s --- add_foreign_key("protected_tag_create_access_levels", "users") - -> 0.0018s --- add_foreign_key("protected_tags", "projects", {:name=>"fk_8e4af87648", :on_delete=>:cascade}) - -> 0.0015s --- add_foreign_key("push_event_payloads", "events", {:name=>"fk_36c74129da", :on_delete=>:cascade}) - -> 0.0013s --- add_foreign_key("releases", "projects", {:name=>"fk_47fe2a0596", :on_delete=>:cascade}) - -> 0.0015s --- add_foreign_key("services", "projects", {:name=>"fk_71cce407f9", :on_delete=>:cascade}) - -> 0.0015s --- add_foreign_key("snippets", "projects", {:name=>"fk_be41fd4bb7", :on_delete=>:cascade}) - -> 0.0017s --- add_foreign_key("subscriptions", "projects", {:on_delete=>:cascade}) - -> 0.0018s --- add_foreign_key("system_note_metadata", "notes", {:name=>"fk_d83a918cb1", 
:on_delete=>:cascade}) - -> 0.0015s --- add_foreign_key("timelogs", "issues", {:name=>"fk_timelogs_issues_issue_id", :on_delete=>:cascade}) - -> 0.0015s --- add_foreign_key("timelogs", "merge_requests", {:name=>"fk_timelogs_merge_requests_merge_request_id", :on_delete=>:cascade}) - -> 0.0016s --- add_foreign_key("todos", "projects", {:name=>"fk_45054f9c45", :on_delete=>:cascade}) - -> 0.0018s --- add_foreign_key("trending_projects", "projects", {:on_delete=>:cascade}) - -> 0.0015s --- add_foreign_key("u2f_registrations", "users") - -> 0.0017s --- add_foreign_key("user_custom_attributes", "users", {:on_delete=>:cascade}) - -> 0.0019s --- add_foreign_key("user_synced_attributes_metadata", "users", {:on_delete=>:cascade}) - -> 0.0016s --- add_foreign_key("users_star_projects", "projects", {:name=>"fk_22cd27ddfc", :on_delete=>:cascade}) - -> 0.0016s --- add_foreign_key("web_hook_logs", "web_hooks", {:on_delete=>:cascade}) - -> 0.0014s --- add_foreign_key("web_hooks", "projects", {:name=>"fk_0c8ca6d9d1", :on_delete=>:cascade}) - -> 0.0017s --- initialize_schema_migrations_table() - -> 0.0112s -$ JOB_NAME=( $CI_JOB_NAME ) -$ export CI_NODE_INDEX=${JOB_NAME[-2]} -$ export CI_NODE_TOTAL=${JOB_NAME[-1]} -$ export KNAPSACK_REPORT_PATH=knapsack/${CI_PROJECT_NAME}/${JOB_NAME[0]}_node_${CI_NODE_INDEX}_${CI_NODE_TOTAL}_report.json -$ export KNAPSACK_GENERATE_REPORT=true -$ export CACHE_CLASSES=true -$ cp ${KNAPSACK_SPINACH_SUITE_REPORT_PATH} ${KNAPSACK_REPORT_PATH} -$ scripts/gitaly-test-spawn -Gem.path: ["/root/.gem/ruby/2.3.0", "/usr/local/lib/ruby/gems/2.3.0", "/usr/local/bundle"] -ENV['BUNDLE_GEMFILE']: nil -ENV['RUBYOPT']: nil -bundle config in /builds/gitlab-org/gitlab-ce -scripts/gitaly-test-spawn:10:in `
': undefined local variable or method `gitaly_dir' for main:Object (NameError) -Did you mean? gitaly_dir -Settings are listed in order of priority. The top value will be used. -retry -Set for your local app (/usr/local/bundle/config): 3 - -path -Set for your local app (/usr/local/bundle/config): "vendor" -Set via BUNDLE_PATH: "/usr/local/bundle" - -jobs -Set for your local app (/usr/local/bundle/config): "2" - -clean -Set for your local app (/usr/local/bundle/config): "true" - -without -Set for your local app (/usr/local/bundle/config): [:production] - -silence_root_warning -Set via BUNDLE_SILENCE_ROOT_WARNING: true - -app_config -Set via BUNDLE_APP_CONFIG: "/usr/local/bundle" - -install_flags -Set via BUNDLE_INSTALL_FLAGS: "--without=production --jobs=2 --path=vendor --retry=3 --quiet" - -bin -Set via BUNDLE_BIN: "/usr/local/bundle/bin" - -gemfile -Set via BUNDLE_GEMFILE: "/builds/gitlab-org/gitlab-ce/Gemfile" - -section_end:1517486961:build_script -section_start:1517486961:after_script -section_end:1517486962:after_script -section_start:1517486962:upload_artifacts -Uploading artifacts... -WARNING: coverage/: no matching files  -knapsack/: found 5 matching files  -WARNING: tmp/capybara/: no matching files  -Uploading artifacts to coordinator... ok  id=50551722 responseStatus=201 Created token=XkN753rp -section_end:1517486963:upload_artifacts -ERROR: Job failed: exit code 1 +$ wget https://about.gitlab.com/ +converted 'https://about.gitlab.com/' (ANSI_X3.4-1968) -> 'https://about.gitlab.com/' (UTF-8) +--2018-04-03 06:46:12-- https://about.gitlab.com/ +Resolving about.gitlab.com (about.gitlab.com)... 40.79.82.214 +Connecting to about.gitlab.com (about.gitlab.com)|40.79.82.214|:443... connected. +HTTP request sent, awaiting response... 200 OK +Length: 69778 (68K) [text/html] +Saving to: 'index.html' + + 0K .......... .......... .......... .......... .......... 73% 195K 0s + 50K .......... ........ 100% 37.5M=0.3s + +2018-04-03 06:46:14 (265 KB/s) - 'index.html' saved [69778/69778] + +$ cat index.html + The only single product for the complete DevOps lifecycle - GitLab | GitLab

Concurrent DevOps

A single application built from the ground up for the complete DevOps lifecycle.

Try GitLab for Free

Plan Plan: Get your best ideas into development.

Whether you use Waterfall, Agile, or Conversational Development, GitLab streamlines your collaborative workflows. Visualize, prioritize, coordinate, and track your progress your way with GitLab’s flexible project management tools.

Create Create: Securely write and manage code and project data.

Consolidate source code into a single DVCS that’s easily managed and controlled without disrupting your workflow. GitLab’s git repositories come complete with branching tools and access controls, providing a scalable, single source of truth for collaborating on projects and code.

Verify: Ship better software, faster.

Spot errors sooner and shorten feedback cycles with built-in code review, code testing, code quality, and review apps. Customize your approval workflow controls, automatically test the quality of your code, and spin up a staging environment for every code change. GitLab Continuous Integration is the most popular next generation testing system that auto scales to run your tests faster.

Review Package: Manage custom container images with ease.

GitLab Container Registry gives you the enhanced security and access controls of custom Docker images without 3rd party add-ons. Easily upload and download images from GitLab CI with full Git repository management integration.

Measure Release: Minimize complexity with built-in Continuous Delivery.

Spend less time configuring your tools, and more time creating. Whether you’re deploying to one server or thousands, build, test, and release your code confidently and securely with GitLab’s built-in continuous delivery and deployment.

Configure: Automate configuration management

Automate your entire workflow from build to deploy and monitoring with GitLab Auto Devops. Best practice templates get you started with minimal to zero configuration. Then customize everything from buildpacks to CI/CD.

Release Monitor: Analyze shipping velocity and monitor application performance.

Measure how long it takes to go from planning to monitoring and ensure your applications are always responsive and available. GitLab collects and displays performance metrics for deployed apps using Prometheus so you can know in an instant how code changes impact your production environment.

Open core and continuously improved

With major product releases every 22nd of the month, GitLab’s high-velocity pace of innovation is possible because of the collaboration of the GitLab community, delivering the leading modern software development product, built for today’s modern software developer.

GitLab named a Leader in the Forrester Wave™

Receiving the highest score in Forrester’s Current Offering evaluation, GitLab was named as a Leader in Continuous Integration in The Forrester Wave™: Continuous Integration Tools, Q3 2017 report. According to the report, “GitLab delivers ease of use, scalability, integration, and innovation.”

GitLab is in the Leader’s arc

Try GitLab Enterprise Edition free for 30 days.

GitLab Enterprise Edition is built on the open core of GitLab Community Edition to include additional authentication, user management, and workflow features designed for larger development teams.

No credit card required.

Start your free trial Join a live demo

Concurrent DevOps

Visible

Real time view across the entire lifecycle

  • See everything that matters
  • Stay in your flow
  • Don’t wait on syncing
  • Manage projects, not tools
  • Improve cycle time.

Efficient

Collaborate without waiting

  • Start immediately
  • Work concurrently
  • No more handoffs

Governed

Develop and operate with confidence

  • Security and compliance already built-in
  • Simplify user management
  • Expedite auditing
  • Act with certainty
10.6

New features every month

In this month’s release of GitLab 10.6, we've added CI/CD for GitHub, improved Kubernetes integration, and much more.

Used by more than 100,000 organizations around the globe

  • Sony
  • Comcast
  • NASA
  • Bayer
  • Nasdaq

We decided to use an Open Source based platform so we can participate in the development and contribute to the features and concepts we need such as file size statistics, OpenID Connect, GPG, and Docker Registry. GitLab's built-in Continuous Integration and independent CI Runners allow our developers to integrate very specific environments, boosting productivity and increasing developer satisfaction.

Roger Meier, Social Coding Platform, Siemens
GitLab.com screenshot

GitLab.com

Host and explore projects

Sign up
GitLab Navigation screenshot

Blog

Global Developer Report: 2018 is the year for open source and DevOps

Read more
IBM Think Logo

Event

Join us at IBM Think March 19-22

View all events
GitLab Enterprise screenshot

Demo

Inside GitLab Enterprise

Register now
Job succeeded  \ No newline at end of file diff --git a/spec/lib/gitlab/ci/trace/chunked_file/live_trace_spec.rb b/spec/lib/gitlab/ci/trace/chunked_file/live_trace_spec.rb index c2c7fe1c8d32..d19f181f74b7 100644 --- a/spec/lib/gitlab/ci/trace/chunked_file/live_trace_spec.rb +++ b/spec/lib/gitlab/ci/trace/chunked_file/live_trace_spec.rb @@ -51,7 +51,7 @@ end context 'when offset is size' do - let(:offset) { sample_trace_raw.length } + let(:offset) { sample_trace_raw.bytesize } it 'does nothing' do expect { subject }.not_to change { described_class.exist?(job_id) } diff --git a/spec/support/shared_examples/lib/gitlab/ci/trace/chunked_file/chunked_io_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/ci/trace/chunked_file/chunked_io_shared_examples.rb index 7a7a0760f2fe..48828101154f 100644 --- a/spec/support/shared_examples/lib/gitlab/ci/trace/chunked_file/chunked_io_shared_examples.rb +++ b/spec/support/shared_examples/lib/gitlab/ci/trace/chunked_file/chunked_io_shared_examples.rb @@ -90,7 +90,7 @@ subject { chunked_io.seek(pos, where) } before do - set_smaller_buffer_size_than(sample_trace_raw.length) + set_smaller_buffer_size_than(sample_trace_raw.bytesize) fill_trace_to_chunks(sample_trace_raw) end @@ -98,11 +98,11 @@ let(:pos) { 0 } let(:where) { IO::SEEK_END } - it { is_expected.to eq(sample_trace_raw.length) } + it { is_expected.to eq(sample_trace_raw.bytesize) } end context 'when moves pos to middle of the file' do - let(:pos) { sample_trace_raw.length / 2 } + let(:pos) { sample_trace_raw.bytesize / 2 } let(:where) { IO::SEEK_SET } it { is_expected.to eq(pos) } @@ -112,7 +112,7 @@ it 'matches the result' do expect(chunked_io.seek(0)).to eq(0) expect(chunked_io.seek(100, IO::SEEK_CUR)).to eq(100) - expect { chunked_io.seek(sample_trace_raw.length + 1, IO::SEEK_CUR) } + expect { chunked_io.seek(sample_trace_raw.bytesize + 1, IO::SEEK_CUR) } .to raise_error('new position is outside of file') end end @@ -122,13 +122,13 @@ subject { chunked_io.eof? 
} before do - set_smaller_buffer_size_than(sample_trace_raw.length) + set_smaller_buffer_size_than(sample_trace_raw.bytesize) fill_trace_to_chunks(sample_trace_raw) end context 'when current pos is at end of the file' do before do - chunked_io.seek(sample_trace_raw.length, IO::SEEK_SET) + chunked_io.seek(sample_trace_raw.bytesize, IO::SEEK_SET) end it { is_expected.to be_truthy } @@ -148,7 +148,7 @@ context 'when buffer size is smaller than file size' do before do - set_smaller_buffer_size_than(sample_trace_raw.length) + set_smaller_buffer_size_than(sample_trace_raw.bytesize) fill_trace_to_chunks(sample_trace_raw) end @@ -160,7 +160,7 @@ context 'when buffer size is larger than file size', :partial_support do before do - set_larger_buffer_size_than(sample_trace_raw.length) + set_larger_buffer_size_than(sample_trace_raw.bytesize) fill_trace_to_chunks(sample_trace_raw) end @@ -186,7 +186,7 @@ context 'when buffer size is smaller than file size' do before do - set_smaller_buffer_size_than(sample_trace_raw.length) + set_smaller_buffer_size_than(sample_trace_raw.bytesize) fill_trace_to_chunks(sample_trace_raw) end @@ -195,7 +195,7 @@ context 'when buffer size is larger than file size', :partial_support do before do - set_larger_buffer_size_than(sample_trace_raw.length) + set_larger_buffer_size_than(sample_trace_raw.bytesize) fill_trace_to_chunks(sample_trace_raw) end @@ -204,7 +204,7 @@ context 'when buffer size is half of file size' do before do - set_half_buffer_size_of(sample_trace_raw.length) + set_half_buffer_size_of(sample_trace_raw.bytesize) fill_trace_to_chunks(sample_trace_raw) end @@ -217,7 +217,7 @@ context 'when buffer size is smaller than file size' do before do - set_smaller_buffer_size_than(sample_trace_raw.length) + set_smaller_buffer_size_than(sample_trace_raw.bytesize) fill_trace_to_chunks(sample_trace_raw) end @@ -228,7 +228,7 @@ context 'when buffer size is larger than file size', :partial_support do before do - set_larger_buffer_size_than(sample_trace_raw.length) + set_larger_buffer_size_than(sample_trace_raw.bytesize) fill_trace_to_chunks(sample_trace_raw) end @@ -239,11 +239,11 @@ end context 'when tries to read oversize' do - let(:length) { sample_trace_raw.length + 1000 } + let(:length) { sample_trace_raw.bytesize + 1000 } context 'when buffer size is smaller than file size' do before do - set_smaller_buffer_size_than(sample_trace_raw.length) + set_smaller_buffer_size_than(sample_trace_raw.bytesize) fill_trace_to_chunks(sample_trace_raw) end @@ -254,7 +254,7 @@ context 'when buffer size is larger than file size', :partial_support do before do - set_larger_buffer_size_than(sample_trace_raw.length) + set_larger_buffer_size_than(sample_trace_raw.bytesize) fill_trace_to_chunks(sample_trace_raw) end @@ -269,7 +269,7 @@ context 'when buffer size is smaller than file size' do before do - set_smaller_buffer_size_than(sample_trace_raw.length) + set_smaller_buffer_size_than(sample_trace_raw.bytesize) fill_trace_to_chunks(sample_trace_raw) end @@ -280,7 +280,7 @@ context 'when buffer size is larger than file size', :partial_support do before do - set_larger_buffer_size_than(sample_trace_raw.length) + set_larger_buffer_size_than(sample_trace_raw.bytesize) fill_trace_to_chunks(sample_trace_raw) end @@ -306,7 +306,7 @@ context 'when buffer size is smaller than file size' do before do - set_smaller_buffer_size_than(sample_trace_raw.length) + set_smaller_buffer_size_than(sample_trace_raw.bytesize) fill_trace_to_chunks(sample_trace_raw) end @@ -315,7 +315,7 @@ context 'when buffer size 
is larger than file size', :partial_support do
       before do
-        set_larger_buffer_size_than(sample_trace_raw.length)
+        set_larger_buffer_size_than(sample_trace_raw.bytesize)
         fill_trace_to_chunks(sample_trace_raw)
       end
@@ -324,7 +324,7 @@ context 'when buffer size is half of file size' do
       before do
-        set_half_buffer_size_of(sample_trace_raw.length)
+        set_half_buffer_size_of(sample_trace_raw.bytesize)
         fill_trace_to_chunks(sample_trace_raw)
       end
@@ -333,7 +333,7 @@ context 'when pos is at middle of the file' do
       before do
-        set_smaller_buffer_size_than(sample_trace_raw.length)
+        set_smaller_buffer_size_than(sample_trace_raw.bytesize)
         fill_trace_to_chunks(sample_trace_raw)

         chunked_io.seek(chunked_io.size / 2)
@@ -357,21 +357,21 @@ context 'when data does not exist' do
       shared_examples 'writes a trace' do
         it do
-          is_expected.to eq(data.length)
+          is_expected.to eq(data.bytesize)

           described_class.new(job_id, nil, 'rb') do |stream|
             expect(stream.read).to eq(data)
             expect(chunk_stores.inject(0) { |sum, store| sum + store.chunks_count(job_id) })
               .to eq(stream.send(:chunks_count))
             expect(chunk_stores.inject(0) { |sum, store| sum + store.chunks_size(job_id) })
-              .to eq(data.length)
+              .to eq(data.bytesize)
           end
         end
       end

       context 'when buffer size is smaller than file size' do
         before do
-          set_smaller_buffer_size_than(data.length)
+          set_smaller_buffer_size_than(data.bytesize)
         end

         it_behaves_like 'writes a trace'
@@ -379,7 +379,7 @@ context 'when buffer size is larger than file size', :partial_support do
         before do
-          set_larger_buffer_size_than(data.length)
+          set_larger_buffer_size_than(data.bytesize)
         end

         it_behaves_like 'writes a trace'
@@ -387,7 +387,7 @@ context 'when buffer size is half of file size' do
         before do
-          set_half_buffer_size_of(data.length)
+          set_half_buffer_size_of(data.bytesize)
         end

         it_behaves_like 'writes a trace'
@@ -404,12 +404,12 @@ context 'when data already exists', :partial_support do
       let(:exist_data) { 'exist data' }
-      let(:total_size) { exist_data.length + data.length }
+      let(:total_size) { exist_data.bytesize + data.bytesize }

       shared_examples 'appends a trace' do
         it do
           described_class.new(job_id, nil, 'a+b') do |stream|
-            expect(stream.write(data)).to eq(data.length)
+            expect(stream.write(data)).to eq(data.bytesize)
           end

           described_class.new(job_id, nil, 'rb') do |stream|
@@ -424,7 +424,7 @@ context 'when buffer size is smaller than file size' do
         before do
-          set_smaller_buffer_size_than(data.length)
+          set_smaller_buffer_size_than(data.bytesize)
           fill_trace_to_chunks(exist_data)
         end
@@ -433,7 +433,7 @@ context 'when buffer size is larger than file size', :partial_support do
         before do
-          set_larger_buffer_size_than(data.length)
+          set_larger_buffer_size_than(data.bytesize)
           fill_trace_to_chunks(exist_data)
         end
@@ -442,7 +442,7 @@ context 'when buffer size is half of file size' do
         before do
-          set_half_buffer_size_of(data.length)
+          set_half_buffer_size_of(data.bytesize)
           fill_trace_to_chunks(exist_data)
         end
-- 
GitLab


From 9766af363dda76f2313f55202b76acafbf137e50 Mon Sep 17 00:00:00 2001
From: Shinya Maeda
Date: Tue, 3 Apr 2018 18:47:54 +0900
Subject: [PATCH 22/86] Remove unrelated changes

---
 lib/gitlab/ci/trace.rb | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/lib/gitlab/ci/trace.rb b/lib/gitlab/ci/trace.rb
index b209377d4623..3dc4848c23da 100644
--- a/lib/gitlab/ci/trace.rb
+++ b/lib/gitlab/ci/trace.rb
@@ -77,7 +77,7 @@ def read
 
   def write
     stream = Gitlab::Ci::Trace::Stream.new do
-      if Feature.enabled?('ci_enable_live_trace') || true
+      if Feature.enabled?('ci_enable_live_trace')
        if 
current_path current_path else -- GitLab From cb3a0e33a051be1422c13494b282ea8c93f29e2a Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Tue, 3 Apr 2018 22:55:10 +0900 Subject: [PATCH 23/86] Fix bug; truncate did not reset size and tell --- lib/gitlab/ci/trace/chunked_file/chunked_io.rb | 6 +++--- lib/gitlab/ci/trace/chunked_file/live_trace.rb | 3 ++- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/lib/gitlab/ci/trace/chunked_file/chunked_io.rb b/lib/gitlab/ci/trace/chunked_file/chunked_io.rb index 5767d1487cd9..55e04322ceaa 100644 --- a/lib/gitlab/ci/trace/chunked_file/chunked_io.rb +++ b/lib/gitlab/ci/trace/chunked_file/chunked_io.rb @@ -152,7 +152,7 @@ def in_range? end def get_chunk - return '' unless size > 0 + return '' if size <= 0 || eof? unless in_range? chunk_store.open(job_id, chunk_index, params_for_store) do |store| @@ -233,8 +233,8 @@ def chunks_count (size / buffer_size.to_f).ceil end - def last_range - ((size / buffer_size) * buffer_size..size) + def last_chunk? + ((size / buffer_size) * buffer_size..size).include?(tell) end def chunk_store diff --git a/lib/gitlab/ci/trace/chunked_file/live_trace.rb b/lib/gitlab/ci/trace/chunked_file/live_trace.rb index bf918fd4ace3..1d3433d88ce2 100644 --- a/lib/gitlab/ci/trace/chunked_file/live_trace.rb +++ b/lib/gitlab/ci/trace/chunked_file/live_trace.rb @@ -25,6 +25,7 @@ def stash_to_database(store) def truncate(offset) if offset == 0 delete + @size = @tell = 0 elsif offset == size # no-op else @@ -45,7 +46,7 @@ def calculate_size(job_id) end def chunk_store - if last_range.include?(tell) + if last_chunk? ChunkedFile::ChunkStore::Redis else ChunkedFile::ChunkStore::Database -- GitLab From de5194cdb8136d424e0ba88914645cb7936299be Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kamil=20Trzci=C5=84ski?= Date: Wed, 4 Apr 2018 12:48:30 +0200 Subject: [PATCH 24/86] Removed unused code --- .../ci/trace/chunked_file/chunk_store/base.rb | 63 ---- .../chunked_file/chunk_store/database.rb | 92 ------ .../trace/chunked_file/chunk_store/redis.rb | 132 --------- .../ci/trace/chunked_file/chunked_io.rb | 274 ------------------ .../trace/chunked_file/concerns/callbacks.rb | 37 --- .../ci/trace/chunked_file/concerns/errors.rb | 18 -- .../ci/trace/chunked_file/concerns/opener.rb | 23 -- .../chunked_file/concerns/permissions.rb | 93 ------ .../ci/trace/chunked_file/live_trace.rb | 63 ---- .../chunked_file/chunk_store/database_spec.rb | 222 -------------- .../chunked_file/chunk_store/redis_spec.rb | 273 ----------------- .../ci/trace/chunked_file/chunked_io_spec.rb | 32 -- .../ci/trace/chunked_file/live_trace_spec.rb | 89 ------ 13 files changed, 1411 deletions(-) delete mode 100644 lib/gitlab/ci/trace/chunked_file/chunk_store/base.rb delete mode 100644 lib/gitlab/ci/trace/chunked_file/chunk_store/database.rb delete mode 100644 lib/gitlab/ci/trace/chunked_file/chunk_store/redis.rb delete mode 100644 lib/gitlab/ci/trace/chunked_file/chunked_io.rb delete mode 100644 lib/gitlab/ci/trace/chunked_file/concerns/callbacks.rb delete mode 100644 lib/gitlab/ci/trace/chunked_file/concerns/errors.rb delete mode 100644 lib/gitlab/ci/trace/chunked_file/concerns/opener.rb delete mode 100644 lib/gitlab/ci/trace/chunked_file/concerns/permissions.rb delete mode 100644 lib/gitlab/ci/trace/chunked_file/live_trace.rb delete mode 100644 spec/lib/gitlab/ci/trace/chunked_file/chunk_store/database_spec.rb delete mode 100644 spec/lib/gitlab/ci/trace/chunked_file/chunk_store/redis_spec.rb delete mode 100644 spec/lib/gitlab/ci/trace/chunked_file/chunked_io_spec.rb delete mode 
100644 spec/lib/gitlab/ci/trace/chunked_file/live_trace_spec.rb diff --git a/lib/gitlab/ci/trace/chunked_file/chunk_store/base.rb b/lib/gitlab/ci/trace/chunked_file/chunk_store/base.rb deleted file mode 100644 index 6e104a6d7641..000000000000 --- a/lib/gitlab/ci/trace/chunked_file/chunk_store/base.rb +++ /dev/null @@ -1,63 +0,0 @@ -module Gitlab - module Ci - class Trace - module ChunkedFile - module ChunkStore - class Base - attr_reader :params - - def initialize(*identifiers, **params) - @params = params - end - - def close - raise NotImplementedError - end - - def get - raise NotImplementedError - end - - def size - raise NotImplementedError - end - - # Write data to chunk store. Always overwrite. - # - # @param [String] data - # @return [Fixnum] length of the data after writing - def write!(data) - raise NotImplementedError - end - - # Append data to chunk store - # - # @param [String] data - # @return [Fixnum] length of the appended - def append!(data) - raise NotImplementedError - end - - # Truncate data to chunk store - # - # @param [String] offset - def truncate!(offset) - raise NotImplementedError - end - - # Delete data from chunk store - # - # @param [String] offset - def delete! - raise NotImplementedError - end - - def filled? - size == params[:buffer_size] - end - end - end - end - end - end -end diff --git a/lib/gitlab/ci/trace/chunked_file/chunk_store/database.rb b/lib/gitlab/ci/trace/chunked_file/chunk_store/database.rb deleted file mode 100644 index d3665031e9d7..000000000000 --- a/lib/gitlab/ci/trace/chunked_file/chunk_store/database.rb +++ /dev/null @@ -1,92 +0,0 @@ -module Gitlab - module Ci - class Trace - module ChunkedFile - module ChunkStore - class Database < Base - class << self - def open(job_id, chunk_index, **params) - raise ArgumentError unless job_id && chunk_index - - job_trace_chunk = ::Ci::JobTraceChunk - .find_or_initialize_by(job_id: job_id, chunk_index: chunk_index) - store = self.new(job_trace_chunk, params) - - yield store - ensure - store&.close - end - - def exist?(job_id, chunk_index) - ::Ci::JobTraceChunk.exists?(job_id: job_id, chunk_index: chunk_index) - end - - def chunks_count(job_id) - ::Ci::JobTraceChunk.where(job_id: job_id).count - end - - def chunks_size(job_id) - ::Ci::JobTraceChunk.where(job_id: job_id).pluck('data') - .inject(0) { |sum, data| sum + data.bytesize } - end - - def delete_all(job_id) - ::Ci::JobTraceChunk.destroy_all(job_id: job_id) - end - end - - attr_reader :job_trace_chunk - - def initialize(job_trace_chunk, **params) - super - - @job_trace_chunk = job_trace_chunk - end - - def close - @job_trace_chunk = nil - end - - def get - puts "#{self.class.name} - #{__callee__}: params[:chunk_index]: #{params[:chunk_index]}" - - job_trace_chunk.data - end - - def size - job_trace_chunk.data&.bytesize || 0 - end - - def write!(data) - raise NotImplementedError, 'Partial writing is not supported' unless params[:buffer_size] == data&.bytesize - raise NotImplementedError, 'UPDATE (Overwriting data) is not supported' if job_trace_chunk.data - - puts "#{self.class.name} - #{__callee__}: data.bytesize: #{data.bytesize.inspect} params[:chunk_index]: #{params[:chunk_index]}" - - job_trace_chunk.data = data - job_trace_chunk.save! - - data.bytesize - end - - def append!(data) - raise NotImplementedError - end - - def truncate!(offset) - raise NotImplementedError - end - - def delete! - raise ActiveRecord::RecordNotFound, 'Could not find deletable record' unless job_trace_chunk.persisted? 
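# The chunk-store classes being removed here all satisfy one small
# contract: get/size readers, destructive write!/append!/truncate!/delete!,
# and filled? to signal that a chunk has reached buffer_size. A minimal
# in-memory sketch of that contract (ToyChunkStore is illustrative only,
# not a class from this patch):
class ToyChunkStore
  def initialize(buffer_size:)
    @buffer_size = buffer_size
    @data = nil
  end

  def get
    @data
  end

  def size
    @data.to_s.bytesize
  end

  # Always overwrites the whole chunk and returns the stored length.
  def write!(data)
    @data = data
    data.bytesize
  end

  # Appends and returns the number of bytes appended.
  def append!(data)
    @data = get.to_s + data
    data.bytesize
  end

  def delete!
    @data = nil
  end

  # A full chunk is what LiveTrace moves from Redis to the database.
  def filled?
    size == @buffer_size
  end
end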
- - puts "#{self.class.name} - #{__callee__}: params[:chunk_index]: #{params[:chunk_index]}" - - job_trace_chunk.destroy! - end - end - end - end - end - end -end diff --git a/lib/gitlab/ci/trace/chunked_file/chunk_store/redis.rb b/lib/gitlab/ci/trace/chunked_file/chunk_store/redis.rb deleted file mode 100644 index 5d3b43f82916..000000000000 --- a/lib/gitlab/ci/trace/chunked_file/chunk_store/redis.rb +++ /dev/null @@ -1,132 +0,0 @@ -module Gitlab - module Ci - class Trace - module ChunkedFile - module ChunkStore - class Redis < Base - class << self - def open(job_id, chunk_index, **params) - raise ArgumentError unless job_id && chunk_index - - buffer_key = self.buffer_key(job_id, chunk_index) - store = self.new(buffer_key, params) - - yield store - ensure - store&.close - end - - def exist?(job_id, chunk_index) - Gitlab::Redis::Cache.with do |redis| - redis.exists(self.buffer_key(job_id, chunk_index)) - end - end - - def chunks_count(job_id) - Gitlab::Redis::Cache.with do |redis| - redis.scan_each(match: buffer_key(job_id, '?')).inject(0) do |sum, key| - sum + 1 - end - end - end - - def chunks_size(job_id) - Gitlab::Redis::Cache.with do |redis| - redis.scan_each(match: buffer_key(job_id, '?')).inject(0) do |sum, key| - sum + redis.strlen(key) - end - end - end - - def delete_all(job_id) - Gitlab::Redis::Cache.with do |redis| - redis.scan_each(match: buffer_key(job_id, '?')) do |key| - redis.del(key) - end - end - end - - def buffer_key(job_id, chunk_index) - "live_trace_buffer:#{job_id}:#{chunk_index}" - end - end - - BufferKeyNotFoundError = Class.new(StandardError) - WriteError = Class.new(StandardError) - - attr_reader :buffer_key - - def initialize(buffer_key, **params) - super - - @buffer_key = buffer_key - end - - def close - @buffer_key = nil - end - - def get - puts "#{self.class.name} - #{__callee__}: params[:chunk_index]: #{params[:chunk_index]}" - - Gitlab::Redis::Cache.with do |redis| - redis.get(buffer_key) - end - end - - def size - Gitlab::Redis::Cache.with do |redis| - redis.strlen(buffer_key) - end - end - - def write!(data) - raise ArgumentError, 'Could not write empty data' unless data.present? - - puts "#{self.class.name} - #{__callee__}: data.bytesize: #{data.bytesize.inspect} params[:chunk_index]: #{params[:chunk_index]}" - Gitlab::Redis::Cache.with do |redis| - unless redis.set(buffer_key, data) == 'OK' - raise WriteError, 'Failed to write' - end - - redis.strlen(buffer_key) - end - end - - def append!(data) - raise ArgumentError, 'Could not write empty data' unless data.present? - - puts "#{self.class.name} - #{__callee__}: data.bytesize: #{data.bytesize.inspect} params[:chunk_index]: #{params[:chunk_index]}" - Gitlab::Redis::Cache.with do |redis| - raise BufferKeyNotFoundError, 'Buffer key is not found' unless redis.exists(buffer_key) - - original_size = size - new_size = redis.append(buffer_key, data) - appended_size = new_size - original_size - - raise WriteError, 'Failed to append' unless appended_size == data.bytesize - - appended_size - end - end - - def truncate!(offset) - raise NotImplementedError - end - - def delete! 
- puts "#{self.class.name} - #{__callee__}: params[:chunk_index]: #{params[:chunk_index]}" - Gitlab::Redis::Cache.with do |redis| - raise BufferKeyNotFoundError, 'Buffer key is not found' unless redis.exists(buffer_key) - - unless redis.del(buffer_key) == 1 - raise WriteError, 'Failed to delete' - end - end - end - end - end - end - end - end -end diff --git a/lib/gitlab/ci/trace/chunked_file/chunked_io.rb b/lib/gitlab/ci/trace/chunked_file/chunked_io.rb deleted file mode 100644 index 55e04322ceaa..000000000000 --- a/lib/gitlab/ci/trace/chunked_file/chunked_io.rb +++ /dev/null @@ -1,274 +0,0 @@ -## -# ChunkedIO Engine -# -# Choose a chunk_store with your purpose -# This class is designed that it's compatible with IO class (https://ruby-doc.org/core-2.3.1/IO.html) -module Gitlab - module Ci - class Trace - module ChunkedFile - class ChunkedIO - # extend ChunkedFile::Concerns::Opener - include ChunkedFile::Concerns::Errors - include ChunkedFile::Concerns::Hooks - include ChunkedFile::Concerns::Callbacks - prepend ChunkedFile::Concerns::Permissions - - attr_reader :size - attr_reader :tell - attr_reader :chunk, :chunk_range - attr_reader :job_id - attr_reader :mode - - alias_method :pos, :tell - - def initialize(job_id, size = nil, mode = 'rb', &block) - raise NotImplementedError, "Mode 'w' is not supported" if mode.include?('w') - - @size = size || calculate_size(job_id) - @tell = 0 - @job_id = job_id - @mode = mode - - if block_given? - begin - yield self - ensure - self.close - end - end - end - - def close - end - - def binmode - # no-op - end - - def binmode? - true - end - - def seek(amount, where = IO::SEEK_SET) - new_pos = - case where - when IO::SEEK_END - size + amount - when IO::SEEK_SET - amount - when IO::SEEK_CUR - tell + amount - else - -1 - end - - raise ArgumentError, 'new position is outside of file' if new_pos < 0 || new_pos > size - - @tell = new_pos - end - - def eof? - tell == size - end - - def each_line - until eof? - line = readline - break if line.nil? - - yield(line) - end - end - - def read(length = nil, outbuf = nil) - out = "" - - until eof? || (length && out.bytesize >= length) - data = get_chunk - break if data.empty? - - out << data - @tell += data.bytesize - end - - out = out.byteslice(0, length) if length && out.bytesize > length - - out - end - - def readline - out = "" - - until eof? - data = get_chunk - break if data.empty? - - new_line_pos = byte_position(data, "\n") - - if new_line_pos.nil? - out << data - @tell += data.bytesize - else - out << data.byteslice(0..new_line_pos) - @tell += new_line_pos + 1 - break - end - end - - out - end - - def write(data) - raise ArgumentError, 'Could not write empty data' unless data.present? - - if mode.include?('w') - raise NotImplementedError, "Overwrite is not supported" - elsif mode.include?('a') - write_as_append(data) - end - end - - def truncate(offset) - raise NotImplementedError - end - - def flush - # no-op - end - - def present? - chunks_count > 0 - end - - def delete - chunk_store.delete_all - end - - private - - def in_range? - @chunk_range&.include?(tell) - end - - def get_chunk - return '' if size <= 0 || eof? - - unless in_range? - chunk_store.open(job_id, chunk_index, params_for_store) do |store| - @chunk = store.get - - raise ReadError, 'Could not get a chunk' unless chunk && chunk.present? 
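# get_chunk serves reads from a cached chunk and only refetches when the
# current offset falls outside the cached @chunk_range. The caching idea in
# isolation (a sketch: fetch_chunk, buffer_size and tell are assumed
# helpers, not methods defined in this hunk):
def cached_slice
  unless @chunk_range&.include?(tell)
    index = tell / buffer_size
    @chunk = fetch_chunk(index)               # assumed to return a String
    start = index * buffer_size
    @chunk_range = (start...(start + @chunk.bytesize))
  end

  @chunk.byteslice(tell % buffer_size, buffer_size)
end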
- - @chunk_range = (chunk_start...(chunk_start + chunk.bytesize)) - end - end - - @chunk.byteslice(chunk_offset, buffer_size) - end - - def write_as_append(data) - @tell = size - - data_size = data.bytesize - new_tell = tell + data_size - data_offset = 0 - - until tell == new_tell - writable_size = buffer_size - chunk_offset - writable_data = data.byteslice(data_offset, writable_size) - written_size = write_chunk(writable_data) - - data_offset += written_size - @tell += written_size - @size = [tell, size].max - end - - data_size - end - - def write_chunk(data) - written_size = 0 - - chunk_store.open(job_id, chunk_index, params_for_store) do |store| - with_callbacks(:write_chunk, store) do - written_size = if store.size > 0 # # rubocop:disable ZeroLengthPredicate - store.append!(data) - else - store.write!(data) - end - - raise WriteError, 'Written size mismatch' unless data.bytesize == written_size - end - end - - written_size - end - - def params_for_store(c_index = chunk_index) - { - buffer_size: buffer_size, - chunk_start: c_index * buffer_size, - chunk_index: c_index - } - end - - def chunk_offset - tell % buffer_size - end - - def chunk_start - chunk_index * buffer_size - end - - def chunk_end - [chunk_start + buffer_size, size].min - end - - def chunk_index - (tell / buffer_size) - end - - def chunks_count - (size / buffer_size.to_f).ceil - end - - def last_chunk? - ((size / buffer_size) * buffer_size..size).include?(tell) - end - - def chunk_store - raise NotImplementedError - end - - def buffer_size - raise NotImplementedError - end - - def calculate_size(job_id) - chunk_store.chunks_size(job_id) - end - - def byte_position(data, pattern_byte) - index_as_string = data.index(pattern_byte) - return nil unless index_as_string - - if data.getbyte(index_as_string) == pattern_byte.getbyte(0) - index_as_string - else - data2 = data.byteslice(index_as_string, 100) - additional_pos = 0 - data2.each_byte do |b| - break if b == pattern_byte.getbyte(0) - - additional_pos += 1 - end - - index_as_string + additional_pos - end - end - end - end - end - end -end diff --git a/lib/gitlab/ci/trace/chunked_file/concerns/callbacks.rb b/lib/gitlab/ci/trace/chunked_file/concerns/callbacks.rb deleted file mode 100644 index 3990a492612c..000000000000 --- a/lib/gitlab/ci/trace/chunked_file/concerns/callbacks.rb +++ /dev/null @@ -1,37 +0,0 @@ -module Gitlab - module Ci - class Trace - module ChunkedFile - module Concerns - module Callbacks - extend ActiveSupport::Concern - - included do - class_attribute :_before_callbacks, :_after_callbacks, instance_writer: false - self._before_callbacks = Hash.new [] - self._after_callbacks = Hash.new [] - end - - def with_callbacks(kind, *args) - self.class._before_callbacks[kind].each { |c| send c, *args } # rubocop:disable GitlabSecurity/PublicSend - yield - self.class._after_callbacks[kind].each { |c| send c, *args } # rubocop:disable GitlabSecurity/PublicSend - end - - module ClassMethods - def before_callback(kind, callback) - self._before_callbacks = self._before_callbacks - .merge kind => _before_callbacks[kind] + [callback] - end - - def after_callback(kind, callback) - self._after_callbacks = self._after_callbacks - .merge kind => _after_callbacks[kind] + [callback] - end - end - end - end - end - end - end -end diff --git a/lib/gitlab/ci/trace/chunked_file/concerns/errors.rb b/lib/gitlab/ci/trace/chunked_file/concerns/errors.rb deleted file mode 100644 index 5fba9605585f..000000000000 --- a/lib/gitlab/ci/trace/chunked_file/concerns/errors.rb +++ /dev/null @@ 
-1,18 +0,0 @@ -module Gitlab - module Ci - class Trace - module ChunkedFile - module Concerns - module Errors - extend ActiveSupport::Concern - - included do - WriteError = Class.new(StandardError) - ReadError = Class.new(StandardError) - end - end - end - end - end - end -end diff --git a/lib/gitlab/ci/trace/chunked_file/concerns/opener.rb b/lib/gitlab/ci/trace/chunked_file/concerns/opener.rb deleted file mode 100644 index 9f1f6eefcbc2..000000000000 --- a/lib/gitlab/ci/trace/chunked_file/concerns/opener.rb +++ /dev/null @@ -1,23 +0,0 @@ -module Gitlab - module Ci - class Trace - module ChunkedFile - module Concerns - module Opener - extend ActiveSupport::Concern - - class_methods do - def open(*args) - stream = self.new(*args) - - yield stream - ensure - stream&.close - end - end - end - end - end - end - end -end diff --git a/lib/gitlab/ci/trace/chunked_file/concerns/permissions.rb b/lib/gitlab/ci/trace/chunked_file/concerns/permissions.rb deleted file mode 100644 index 016b796afc26..000000000000 --- a/lib/gitlab/ci/trace/chunked_file/concerns/permissions.rb +++ /dev/null @@ -1,93 +0,0 @@ -module Gitlab - module Ci - class Trace - module ChunkedFile - module Concerns - module Permissions - extend ActiveSupport::Concern - - WRITABLE_MODE = %w[a].freeze - READABLE_MODE = %w[r +].freeze - - included do - attr_reader :write_lock_uuid - end - - def initialize(job_id, size, mode = 'rb') - if WRITABLE_MODE.any? { |m| mode.include?(m) } - @write_lock_uuid = Gitlab::ExclusiveLease - .new(write_lock_key(job_id), timeout: 1.hour.to_i).try_obtain - - raise IOError, 'Already opened by another process' unless write_lock_uuid - end - - super - end - - def close - if write_lock_uuid - Gitlab::ExclusiveLease.cancel(write_lock_key(job_id), write_lock_uuid) - end - - super - end - - def read(*args) - can_read! - - super - end - - def readline(*args) - can_read! - - super - end - - def each_line(*args) - can_read! - - super - end - - def write(*args) - can_write! - - super - end - - def truncate(*args) - can_write! - - super - end - - def delete(*args) - can_write! - - super - end - - private - - def can_read! - unless READABLE_MODE.any? { |m| mode.include?(m) } - raise IOError, 'not opened for reading' - end - end - - def can_write! - unless WRITABLE_MODE.any? { |m| mode.include?(m) } - raise IOError, 'not opened for writing' - end - end - - def write_lock_key(job_id) - "live_trace:operation:write:#{job_id}" - end - end - end - end - end - end -end diff --git a/lib/gitlab/ci/trace/chunked_file/live_trace.rb b/lib/gitlab/ci/trace/chunked_file/live_trace.rb deleted file mode 100644 index 1d3433d88ce2..000000000000 --- a/lib/gitlab/ci/trace/chunked_file/live_trace.rb +++ /dev/null @@ -1,63 +0,0 @@ -module Gitlab - module Ci - class Trace - module ChunkedFile - class LiveTrace < ChunkedIO - class << self - def exist?(job_id) - ChunkedFile::ChunkStore::Redis.chunks_count(job_id) > 0 || ChunkedFile::ChunkStore::Database.chunks_count(job_id) > 0 - end - end - - after_callback :write_chunk, :stash_to_database - - def stash_to_database(store) - # Once data is filled into redis, move the data to database - if store.filled? - ChunkedFile::ChunkStore::Database.open(job_id, chunk_index, params_for_store) do |to_store| - to_store.write!(store.get) - store.delete! 
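# stash_to_database runs after each chunk write: once a Redis-backed chunk
# is completely filled it is copied to the database store and the hot copy
# is dropped, so Redis only ever holds the trailing, still-growing chunk.
# The same flow in isolation (redis_store/db_store stand in for the two
# ChunkStore classes; a sketch, not the patch's API):
def stash(redis_store, db_store)
  return unless redis_store.filled?   # only promote completed chunks

  db_store.write!(redis_store.get)    # persist the finished chunk
  redis_store.delete!                 # free the Redis buffer
end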
- end - end - end - - # This is more efficient than iterating each chunk store and deleting - def truncate(offset) - if offset == 0 - delete - @size = @tell = 0 - elsif offset == size - # no-op - else - raise NotImplementedError, 'Unexpected operation' - end - end - - def delete - ChunkedFile::ChunkStore::Redis.delete_all(job_id) - ChunkedFile::ChunkStore::Database.delete_all(job_id) - end - - private - - def calculate_size(job_id) - ChunkedFile::ChunkStore::Redis.chunks_size(job_id) + - ChunkedFile::ChunkStore::Database.chunks_size(job_id) - end - - def chunk_store - if last_chunk? - ChunkedFile::ChunkStore::Redis - else - ChunkedFile::ChunkStore::Database - end - end - - def buffer_size - 128.kilobytes - end - end - end - end - end -end diff --git a/spec/lib/gitlab/ci/trace/chunked_file/chunk_store/database_spec.rb b/spec/lib/gitlab/ci/trace/chunked_file/chunk_store/database_spec.rb deleted file mode 100644 index c84398ca4818..000000000000 --- a/spec/lib/gitlab/ci/trace/chunked_file/chunk_store/database_spec.rb +++ /dev/null @@ -1,222 +0,0 @@ -require 'spec_helper' - -describe Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Database do - let(:job) { create(:ci_build) } - let(:job_id) { job.id } - let(:chunk_index) { 0 } - let(:buffer_size) { 256 } - let(:job_trace_chunk) { ::Ci::JobTraceChunk.new(job_id: job_id, chunk_index: chunk_index) } - let(:params) { { buffer_size: buffer_size } } - let(:data) { 'A' * buffer_size } - - describe '.open' do - subject { described_class.open(job_id, chunk_index, params) } - - it 'opens' do - expect { |b| described_class.open(job_id, chunk_index, params, &b) } - .to yield_successive_args(described_class) - end - - context 'when job_id is nil' do - let(:job_id) { nil } - - it { expect { subject }.to raise_error(ArgumentError) } - end - - context 'when chunk_index is nil' do - let(:chunk_index) { nil } - - it { expect { subject }.to raise_error(ArgumentError) } - end - end - - describe '.exist?' 
do - subject { described_class.exist?(job_id, chunk_index) } - - context 'when job_trace_chunk exists' do - before do - described_class.new(job_trace_chunk, params).write!(data) - end - - it { is_expected.to be_truthy } - end - - context 'when job_trace_chunk does not exist' do - it { is_expected.to be_falsy } - end - end - - describe '.chunks_count' do - subject { described_class.chunks_count(job_id) } - - context 'when job_trace_chunk exists' do - before do - described_class.new(job_trace_chunk, params).write!(data) - end - - it { is_expected.to eq(1) } - - context 'when two chunks exists' do - let(:job_trace_chunk_2) { ::Ci::JobTraceChunk.new(job_id: job_id, chunk_index: chunk_index + 1) } - let(:data_2) { 'B' * buffer_size } - - before do - described_class.new(job_trace_chunk_2, params).write!(data_2) - end - - it { is_expected.to eq(2) } - end - end - - context 'when job_trace_chunk does not exist' do - it { is_expected.to eq(0) } - end - end - - describe '.chunks_size' do - subject { described_class.chunks_size(job_id) } - - context 'when job_trace_chunk exists' do - before do - described_class.new(job_trace_chunk, params).write!(data) - end - - it { is_expected.to eq(data.length) } - - context 'when two chunks exists' do - let(:job_trace_chunk_2) { ::Ci::JobTraceChunk.new(job_id: job_id, chunk_index: chunk_index + 1) } - let(:data_2) { 'B' * buffer_size } - let(:chunks_size) { data.length + data_2.length } - - before do - described_class.new(job_trace_chunk_2, params).write!(data_2) - end - - it { is_expected.to eq(chunks_size) } - end - end - - context 'when job_trace_chunk does not exist' do - it { is_expected.to eq(0) } - end - end - - describe '.delete_all' do - subject { described_class.delete_all(job_id) } - - context 'when job_trace_chunk exists' do - before do - described_class.new(job_trace_chunk, params).write!(data) - end - - it 'deletes all' do - expect { subject }.to change { described_class.chunks_count(job_id) }.by(-1) - end - - context 'when two chunks exists' do - let(:job_trace_chunk_2) { ::Ci::JobTraceChunk.new(job_id: job_id, chunk_index: chunk_index + 1) } - let(:data_2) { 'B' * buffer_size } - - before do - described_class.new(job_trace_chunk_2, params).write!(data_2) - end - - it 'deletes all' do - expect { subject }.to change { described_class.chunks_count(job_id) }.by(-2) - end - end - end - - context 'when buffer_key does not exist' do - it 'deletes all' do - expect { subject }.not_to change { described_class.chunks_count(job_id) } - end - end - end - - describe '#get' do - subject { described_class.new(job_trace_chunk, params).get } - - context 'when job_trace_chunk exists' do - before do - described_class.new(job_trace_chunk, params).write!(data) - end - - it { is_expected.to eq(data) } - end - - context 'when job_trace_chunk does not exist' do - it { is_expected.to be_nil } - end - end - - describe '#size' do - subject { described_class.new(job_trace_chunk, params).size } - - context 'when job_trace_chunk exists' do - before do - described_class.new(job_trace_chunk, params).write!(data) - end - - it { is_expected.to eq(data.length) } - end - - context 'when job_trace_chunk does not exist' do - it { is_expected.to eq(0) } - end - end - - describe '#write!' 
do - subject { described_class.new(job_trace_chunk, params).write!(data) } - - context 'when job_trace_chunk exists' do - before do - described_class.new(job_trace_chunk, params).write!(data) - end - - it { expect { subject }.to raise_error('UPDATE (Overwriting data) is not supported') } - end - - context 'when job_trace_chunk does not exist' do - let(:expected_data) { ::Ci::JobTraceChunk.find_by(job_id: job_id, chunk_index: chunk_index).data } - - it 'writes' do - is_expected.to eq(data.length) - - expect(expected_data).to eq(data) - end - end - - context 'when data is nil' do - let(:data) { nil } - - it { expect { subject }.to raise_error('Partial writing is not supported') } - end - end - - describe '#delete!' do - subject { described_class.new(job_trace_chunk, params).delete! } - - context 'when job_trace_chunk exists' do - before do - described_class.new(job_trace_chunk, params).write!(data) - end - - it 'deletes' do - expect(::Ci::JobTraceChunk.exists?(job_id: job_id, chunk_index: chunk_index)) - .to be_truthy - - subject - - expect(::Ci::JobTraceChunk.exists?(job_id: job_id, chunk_index: chunk_index)) - .to be_falsy - end - end - - context 'when job_trace_chunk does not exist' do - it 'raises an error' do - expect { subject }.to raise_error('Could not find deletable record') - end - end - end -end diff --git a/spec/lib/gitlab/ci/trace/chunked_file/chunk_store/redis_spec.rb b/spec/lib/gitlab/ci/trace/chunked_file/chunk_store/redis_spec.rb deleted file mode 100644 index f1fb64225c90..000000000000 --- a/spec/lib/gitlab/ci/trace/chunked_file/chunk_store/redis_spec.rb +++ /dev/null @@ -1,273 +0,0 @@ -require 'spec_helper' - -describe Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis, :clean_gitlab_redis_cache do - let(:job) { create(:ci_build) } - let(:job_id) { job.id } - let(:chunk_index) { 0 } - let(:buffer_size) { 128.kilobytes } - let(:buffer_key) { described_class.buffer_key(job_id, chunk_index) } - let(:params) { { buffer_size: buffer_size } } - let(:data) { 'Here is the trace' } - - describe '.open' do - subject { described_class.open(job_id, chunk_index, params) } - - it 'opens' do - expect { |b| described_class.open(job_id, chunk_index, params, &b) } - .to yield_successive_args(described_class) - end - - context 'when job_id is nil' do - let(:job_id) { nil } - - it { expect { subject }.to raise_error(ArgumentError) } - end - - context 'when chunk_index is nil' do - let(:chunk_index) { nil } - - it { expect { subject }.to raise_error(ArgumentError) } - end - end - - describe '.exist?' 
do - subject { described_class.exist?(job_id, chunk_index) } - - context 'when buffer_key exists' do - before do - described_class.new(buffer_key, params).write!(data) - end - - it { is_expected.to be_truthy } - end - - context 'when buffer_key does not exist' do - it { is_expected.to be_falsy } - end - end - - describe '.chunks_count' do - subject { described_class.chunks_count(job_id) } - - context 'when buffer_key exists' do - before do - described_class.new(buffer_key, params).write!(data) - end - - it { is_expected.to eq(1) } - - context 'when two chunks exists' do - let(:buffer_key_2) { described_class.buffer_key(job_id, chunk_index + 1) } - let(:data_2) { 'Another data' } - - before do - described_class.new(buffer_key_2, params).write!(data_2) - end - - it { is_expected.to eq(2) } - end - end - - context 'when buffer_key does not exist' do - it { is_expected.to eq(0) } - end - end - - describe '.chunks_size' do - subject { described_class.chunks_size(job_id) } - - context 'when buffer_key exists' do - before do - described_class.new(buffer_key, params).write!(data) - end - - it { is_expected.to eq(data.length) } - - context 'when two chunks exists' do - let(:buffer_key_2) { described_class.buffer_key(job_id, chunk_index + 1) } - let(:data_2) { 'Another data' } - let(:chunks_size) { data.length + data_2.length } - - before do - described_class.new(buffer_key_2, params).write!(data_2) - end - - it { is_expected.to eq(chunks_size) } - end - end - - context 'when buffer_key does not exist' do - it { is_expected.to eq(0) } - end - end - - describe '.delete_all' do - subject { described_class.delete_all(job_id) } - - context 'when buffer_key exists' do - before do - described_class.new(buffer_key, params).write!(data) - end - - it 'deletes all' do - expect { subject }.to change { described_class.chunks_count(job_id) }.by(-1) - end - - context 'when two chunks exists' do - let(:buffer_key_2) { described_class.buffer_key(job_id, chunk_index + 1) } - let(:data_2) { 'Another data' } - - before do - described_class.new(buffer_key_2, params).write!(data_2) - end - - it 'deletes all' do - expect { subject }.to change { described_class.chunks_count(job_id) }.by(-2) - end - end - end - - context 'when buffer_key does not exist' do - it 'deletes all' do - expect { subject }.not_to change { described_class.chunks_count(job_id) } - end - end - end - - describe '.buffer_key' do - subject { described_class.buffer_key(job_id, chunk_index) } - - it { is_expected.to eq("live_trace_buffer:#{job_id}:#{chunk_index}") } - end - - describe '#get' do - subject { described_class.new(buffer_key, params).get } - - context 'when buffer_key exists' do - before do - described_class.new(buffer_key, params).write!(data) - end - - it { is_expected.to eq(data) } - end - - context 'when buffer_key does not exist' do - it { is_expected.to be_nil } - end - end - - describe '#size' do - subject { described_class.new(buffer_key, params).size } - - context 'when buffer_key exists' do - before do - described_class.new(buffer_key, params).write!(data) - end - - it { is_expected.to eq(data.length) } - end - - context 'when buffer_key does not exist' do - it { is_expected.to eq(0) } - end - end - - describe '#write!' 
do - subject { described_class.new(buffer_key, params).write!(data) } - - context 'when buffer_key exists' do - before do - described_class.new(buffer_key, params).write!('Already data in the data') - end - - it 'overwrites' do - is_expected.to eq(data.length) - - Gitlab::Redis::Cache.with do |redis| - expect(redis.get(buffer_key)).to eq(data) - end - end - end - - context 'when buffer_key does not exist' do - it 'writes' do - is_expected.to eq(data.length) - - Gitlab::Redis::Cache.with do |redis| - expect(redis.get(buffer_key)).to eq(data) - end - end - end - - context 'when data is nil' do - let(:data) { nil } - - it 'clears value' do - expect { described_class.new(buffer_key, params).write!(data) } - .to raise_error('Could not write empty data') - end - end - end - - describe '#append!' do - subject { described_class.new(buffer_key, params).append!(data) } - - context 'when buffer_key exists' do - let(:written_chunk) { 'Already data in the data' } - - before do - described_class.new(buffer_key, params).write!(written_chunk) - end - - it 'appends' do - is_expected.to eq(data.length) - - Gitlab::Redis::Cache.with do |redis| - expect(redis.get(buffer_key)).to eq(written_chunk + data) - end - end - end - - context 'when buffer_key does not exist' do - it 'raises an error' do - expect { subject }.to raise_error(described_class::BufferKeyNotFoundError) - end - end - - context 'when data is nil' do - let(:data) { nil } - - it 'raises an error' do - expect { subject }.to raise_error('Could not write empty data') - end - end - end - - describe '#delete!' do - subject { described_class.new(buffer_key, params).delete! } - - context 'when buffer_key exists' do - before do - described_class.new(buffer_key, params).write!(data) - end - - it 'deletes' do - Gitlab::Redis::Cache.with do |redis| - expect(redis.exists(buffer_key)).to be_truthy - end - - subject - - Gitlab::Redis::Cache.with do |redis| - expect(redis.exists(buffer_key)).to be_falsy - end - end - end - - context 'when buffer_key does not exist' do - it 'raises an error' do - expect { subject }.to raise_error(described_class::BufferKeyNotFoundError) - end - end - end -end diff --git a/spec/lib/gitlab/ci/trace/chunked_file/chunked_io_spec.rb b/spec/lib/gitlab/ci/trace/chunked_file/chunked_io_spec.rb deleted file mode 100644 index db0ec074da25..000000000000 --- a/spec/lib/gitlab/ci/trace/chunked_file/chunked_io_spec.rb +++ /dev/null @@ -1,32 +0,0 @@ -require 'spec_helper' - -describe Gitlab::Ci::Trace::ChunkedFile::ChunkedIO, :clean_gitlab_redis_cache do - include ChunkedIOHelpers - - let(:chunked_io) { described_class.new(job_id, nil, mode) } - let(:job) { create(:ci_build) } - let(:job_id) { job.id } - let(:mode) { 'rb' } - - describe 'ChunkStore is Redis', :partial_support do - let(:chunk_stores) { [Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis] } - - before do - allow_any_instance_of(described_class).to receive(:chunk_store).and_return(chunk_stores.first) - allow_any_instance_of(described_class).to receive(:buffer_size).and_return(128.kilobytes) - end - - it_behaves_like 'ChunkedIO shared tests' - end - - describe 'ChunkStore is Database' do - let(:chunk_stores) { [Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Database] } - - before do - allow_any_instance_of(described_class).to receive(:chunk_store).and_return(chunk_stores.first) - allow_any_instance_of(described_class).to receive(:buffer_size).and_return(128.kilobytes) - end - - it_behaves_like 'ChunkedIO shared tests' - end -end diff --git 
a/spec/lib/gitlab/ci/trace/chunked_file/live_trace_spec.rb b/spec/lib/gitlab/ci/trace/chunked_file/live_trace_spec.rb deleted file mode 100644 index d19f181f74b7..000000000000 --- a/spec/lib/gitlab/ci/trace/chunked_file/live_trace_spec.rb +++ /dev/null @@ -1,89 +0,0 @@ -require 'spec_helper' - -describe Gitlab::Ci::Trace::ChunkedFile::LiveTrace, :clean_gitlab_redis_cache do - include ChunkedIOHelpers - - let(:chunked_io) { described_class.new(job_id, nil, mode) } - let(:job) { create(:ci_build) } - let(:job_id) { job.id } - let(:mode) { 'rb' } - - let(:chunk_stores) do - [Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis, - Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Database] - end - - describe 'ChunkStores are Redis and Database', :partial_support do - it_behaves_like 'ChunkedIO shared tests' - end - - describe '.exist?' do - subject { described_class.exist?(job_id) } - - context 'when a chunk exists in a store' do - before do - fill_trace_to_chunks(sample_trace_raw) - end - - it { is_expected.to be_truthy } - end - - context 'when chunks do not exists in any store' do - it { is_expected.to be_falsey } - end - end - - describe '#truncate' do - subject { chunked_io.truncate(offset) } - - let(:mode) { 'a+b' } - - before do - fill_trace_to_chunks(sample_trace_raw) - end - - context 'when offset is 0' do - let(:offset) { 0 } - - it 'deletes all chunks' do - expect { subject }.to change { described_class.exist?(job_id) }.from(true).to(false) - end - end - - context 'when offset is size' do - let(:offset) { sample_trace_raw.bytesize } - - it 'does nothing' do - expect { subject }.not_to change { described_class.exist?(job_id) } - end - end - - context 'when offset is else' do - let(:offset) { 10 } - - it 'raises an error' do - expect { subject }.to raise_error('Unexpected operation') - end - end - end - - describe '#delete' do - subject { chunked_io.delete } - - context 'when a chunk exists in a store' do - before do - fill_trace_to_chunks(sample_trace_raw) - end - - it 'deletes' do - expect { subject }.to change { described_class.exist?(job_id) }.from(true).to(false) - end - end - - context 'when chunks do not exists in any store' do - it 'deletes' do - expect { subject }.not_to change { described_class.exist?(job_id) } - end - end - end -end -- GitLab From 2fac77b0819fc951bb9e896d2615f8a550093707 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kamil=20Trzci=C5=84ski?= Date: Wed, 4 Apr 2018 12:19:17 +0200 Subject: [PATCH 25/86] Simpler chunking :) --- app/models/ci/build.rb | 2 + app/models/ci/job_trace_chunk.rb | 107 +++++++++ ...180326202229_create_ci_job_trace_chunks.rb | 3 +- db/schema.rb | 3 +- lib/gitlab/ci/trace.rb | 28 +-- lib/gitlab/ci/trace/chunked_io.rb | 216 ++++++++++++++++++ lib/gitlab/ci/trace/http_io.rb | 2 +- lib/gitlab/ci/trace/stream.rb | 3 +- 8 files changed, 344 insertions(+), 20 deletions(-) create mode 100644 lib/gitlab/ci/trace/chunked_io.rb diff --git a/app/models/ci/build.rb b/app/models/ci/build.rb index 4aa65bf42738..b471fb805367 100644 --- a/app/models/ci/build.rb +++ b/app/models/ci/build.rb @@ -25,6 +25,8 @@ class Build < CommitStatus has_one :job_artifacts_metadata, -> { where(file_type: Ci::JobArtifact.file_types[:metadata]) }, class_name: 'Ci::JobArtifact', inverse_of: :job, foreign_key: :job_id has_one :job_artifacts_trace, -> { where(file_type: Ci::JobArtifact.file_types[:trace]) }, class_name: 'Ci::JobArtifact', inverse_of: :job, foreign_key: :job_id + has_many :chunks, class_name: 'Ci::JobTraceChunk', foreign_key: :job_id, dependent: :destroy # rubocop:disable 
Cop/ActiveRecordDependent + has_one :metadata, class_name: 'Ci::BuildMetadata' delegate :timeout, to: :metadata, prefix: true, allow_nil: true diff --git a/app/models/ci/job_trace_chunk.rb b/app/models/ci/job_trace_chunk.rb index 8998ed920a5f..85b67997d1ed 100644 --- a/app/models/ci/job_trace_chunk.rb +++ b/app/models/ci/job_trace_chunk.rb @@ -3,5 +3,112 @@ class JobTraceChunk < ActiveRecord::Base extend Gitlab::Ci::Model belongs_to :job, class_name: "Ci::Build", foreign_key: :job_id + + after_destroy :redis_delete_data, if: :redis? + + default_value_for :data_store, :redis + + CHUNK_SIZE = 8 + CHUNK_REDIS_TTL = 1.month + + enum data_store: { + redis: 1, + db: 2, + } + + def data + case + when redis? + redis_data + when db? + raw_data + else + raise 'Unsupported data store' + end + end + + def set_data(value) + raise 'too much data' if value.length > CHUNK_SIZE + + case + when redis? + redis_set_data(value) + when db? + self.raw_data = value + else + raise 'Unsupported data store' + end + + save if changed? + schedule_to_db if fullfilled? + end + + def truncate(offset = 0) + self.append("", offset) + end + + def append(new_data, offset) + current_data = self.data || "" + raise 'Outside of if data' if offset > current_data.bytesize + + self.set_data(current_data.byteslice(0, offset) + new_data) + end + + def size + data&.bytesize.to_i + end + + def start_offset + chunk_index * CHUNK_SIZE + end + + def end_offset + start_offset + size + end + + def range + (start_offset...end_offset) + end + + def use_database! + return if db? + + self.update!(raw_data: data, data_store: :db) + redis_delete_data + end + + private + + def schedule_to_db + return if db? + + self.use_database! + end + + def fullfilled? + size == CHUNK_SIZE + end + + def redis_data + Gitlab::Redis::SharedState.with do |redis| + redis.get(redis_key) + end + end + + def redis_set_data(data) + Gitlab::Redis::SharedState.with do |redis| + redis.set(redis_key, data, ex: CHUNK_REDIS_TTL) + end + end + + def redis_delete_data + Gitlab::Redis::SharedState.with do |redis| + redis.del(redis_key) + end + end + + def redis_key + "gitlab:ci:trace:#{job_id}:chunks:#{chunk_index}" + end end end diff --git a/db/migrate/20180326202229_create_ci_job_trace_chunks.rb b/db/migrate/20180326202229_create_ci_job_trace_chunks.rb index f7548cd766e6..70b230a79789 100644 --- a/db/migrate/20180326202229_create_ci_job_trace_chunks.rb +++ b/db/migrate/20180326202229_create_ci_job_trace_chunks.rb @@ -7,7 +7,8 @@ def change create_table :ci_job_trace_chunks do |t| t.integer :job_id, null: false t.integer :chunk_index, null: false - t.text :data + t.integer :data_store, null: false + t.text :raw_data t.foreign_key :ci_builds, column: :job_id, on_delete: :cascade t.index [:chunk_index, :job_id], unique: true diff --git a/db/schema.rb b/db/schema.rb index 54346dadad28..efad5bd6b1c9 100644 --- a/db/schema.rb +++ b/db/schema.rb @@ -373,7 +373,8 @@ create_table "ci_job_trace_chunks", force: :cascade do |t| t.integer "job_id", null: false t.integer "chunk_index", null: false - t.text "data" + t.integer "data_store", null: false + t.text "raw_data" end add_index "ci_job_trace_chunks", ["chunk_index", "job_id"], name: "index_ci_job_trace_chunks_on_chunk_index_and_job_id", unique: true, using: :btree diff --git a/lib/gitlab/ci/trace.rb b/lib/gitlab/ci/trace.rb index 3dc4848c23da..e20610963567 100644 --- a/lib/gitlab/ci/trace.rb +++ b/lib/gitlab/ci/trace.rb @@ -54,15 +54,15 @@ def append(data, offset) end def exist? - trace_artifact&.exists? 
|| ChunkedFile::LiveTrace.exist?(job.id) || current_path.present? || old_trace.present? + trace_artifact&.exists? || job.chunks.any? || current_path.present? || old_trace.present? end def read stream = Gitlab::Ci::Trace::Stream.new do if trace_artifact trace_artifact.open - elsif ChunkedFile::LiveTrace.exist?(job.id) - ChunkedFile::LiveTrace.new(job.id, nil, "rb") + elsif job.chunks.any? + Gitlab::Ci::Trace::ChunkedIO.new(job) elsif current_path File.open(current_path, "rb") elsif old_trace @@ -77,12 +77,10 @@ def read def write stream = Gitlab::Ci::Trace::Stream.new do - if Feature.enabled?('ci_enable_live_trace') - if current_path - current_path - else - ChunkedFile::LiveTrace.new(job.id, nil, "a+b") - end + if current_path + current_path + elsif Feature.enabled?('ci_enable_live_trace') + Gitlab::Ci::Trace::ChunkedIO.new(job) else File.open(ensure_path, "a+b") end @@ -102,6 +100,7 @@ def erase! FileUtils.rm(trace_path, force: true) end + job.chunks.destroy_all job.erase_old_trace! end @@ -109,13 +108,10 @@ def archive! raise ArchiveError, 'Already archived' if trace_artifact raise ArchiveError, 'Job is not finished yet' unless job.complete? - if ChunkedFile::LiveTrace.exist?(job.id) - ChunkedFile::LiveTrace.new(job.id, nil, 'a+b') do |live_trace_stream| - StringIO.new(live_trace_stream.read, 'rb').tap do |stream| - archive_stream!(stream) - end - - live_trace_stream.delete + if job.chunks.any? + Gitlab::Ci::Trace::ChunkedIO.new(job) do |stream| + archive_stream!(stream) + stream.destroy! end elsif current_path File.open(current_path) do |stream| diff --git a/lib/gitlab/ci/trace/chunked_io.rb b/lib/gitlab/ci/trace/chunked_io.rb new file mode 100644 index 000000000000..9a27c8494492 --- /dev/null +++ b/lib/gitlab/ci/trace/chunked_io.rb @@ -0,0 +1,216 @@ +## +# This class is compatible with IO class (https://ruby-doc.org/core-2.3.1/IO.html) +# source: https://gitlab.com/snippets/1685610 +module Gitlab + module Ci + class Trace + class ChunkedIO + CHUNK_SIZE = ::Ci::JobTraceChunk::CHUNK_SIZE + + FailedToGetChunkError = Class.new(StandardError) + + attr_reader :job + attr_reader :tell, :size + attr_reader :chunk, :chunk_range + + alias_method :pos, :tell + + def initialize(job) + @job = job + @chunks_cache = [] + @tell = 0 + @size = job_chunks.last.try(&:end_offset).to_i + end + + def close + # no-op + end + + def binmode + # no-op + end + + def binmode? + true + end + + def path + nil + end + + def url + nil + end + + def seek(pos, where = IO::SEEK_SET) + new_pos = + case where + when IO::SEEK_END + size + pos + when IO::SEEK_SET + pos + when IO::SEEK_CUR + tell + pos + else + -1 + end + + raise 'new position is outside of file' if new_pos < 0 || new_pos > size + + @tell = new_pos + end + + def eof? + tell == size + end + + def each_line + until eof? + line = readline + break if line.nil? + + yield(line) + end + end + + def read(length = nil) + out = "" + + until eof? || (length && out.length >= length) + data = chunk_slice_from_offset + break if data.empty? + + out << data + @tell += data.bytesize + end + + out = out[0, length] if length && out.length > length + + out + end + + def readline + out = "" + + until eof? + data = chunk_slice_from_offset + new_line = data.index("\n") + + if !new_line.nil? 
+ out << data[0..new_line] + @tell += new_line + 1 + break + else + out << data + @tell += data.bytesize + end + end + + out + end + + def write(data) + start_pos = @tell + + while @tell < start_pos + data.bytesize + # get slice from current offset till the end where it falls into chunk + chunk_bytes = CHUNK_SIZE - chunk_offset + chunk_data = data.byteslice(@tell - start_pos, chunk_bytes) + + # append data to chunk, overwriting from that point + ensure_chunk.append(chunk_data, chunk_offset) + + # move offsets within buffer + @tell += chunk_bytes + @size = [@size, @tell].max + end + end + + def truncate(offset) + raise 'Outside of file' if offset > size + + @tell = offset + @size = offset + invalidate_chunk_cache + + # remove all next chunks + job_chunks.where('chunk_index > ?', chunk_index).destroy_all + + # truncate current chunk + current_chunk.truncate(chunk_offset) if chunk_offset != 0 + end + + def flush + # no-op + end + + def present? + true + end + + def destroy! + job_chunks.destroy_all + invalidate_chunk_cache + end + + private + + ## + # The below methods are not implemented in IO class + # + def in_range? + @chunk_range&.include?(tell) + end + + def chunk_slice_from_offset + unless in_range? + current_chunk.tap do |chunk| + raise FailedToGetChunkError unless chunk + + @chunk = chunk.data.force_encoding(Encoding::BINARY) + @chunk_range = chunk.range + end + end + + @chunk.byteslice(chunk_offset, CHUNK_SIZE) + end + + def chunk_offset + tell % CHUNK_SIZE + end + + def chunk_index + tell / CHUNK_SIZE + end + + def chunk_start + chunk_index * CHUNK_SIZE + end + + def chunk_end + [chunk_start + CHUNK_SIZE, size].min + end + + def invalidate_chunk_cache + @chunks_cache = [] + end + + def current_chunk + @chunks_cache[chunk_index] ||= job_chunks.find_by(chunk_index: chunk_index) + end + + def build_chunk + @chunks_cache[chunk_index] = Ci::JobTraceChunk.new(job: job, chunk_index: chunk_index) + end + + def ensure_chunk + current_chunk || build_chunk + end + + def job_chunks + Ci::JobTraceChunk.where(job: job) + end + end + end + end +end diff --git a/lib/gitlab/ci/trace/http_io.rb b/lib/gitlab/ci/trace/http_io.rb index ac4308f4e2cb..df32693ca9f5 100644 --- a/lib/gitlab/ci/trace/http_io.rb +++ b/lib/gitlab/ci/trace/http_io.rb @@ -161,7 +161,7 @@ def get_chunk @chunk_range ||= (chunk_start...(chunk_start + @chunk.length)) end - @chunk[chunk_offset..BUFFER_SIZE] + @chunk.byteslice(chunk_offset, BUFFER_SIZE) end def request diff --git a/lib/gitlab/ci/trace/stream.rb b/lib/gitlab/ci/trace/stream.rb index b3fe3ef1c4d0..6cd791df42b3 100644 --- a/lib/gitlab/ci/trace/stream.rb +++ b/lib/gitlab/ci/trace/stream.rb @@ -40,8 +40,9 @@ def append(data, offset) end def set(data) - truncate(0) + stream.seek(0, IO::SEEK_SET) stream.write(data) + stream.truncate(data.bytesize) stream.flush() end -- GitLab From e790cc7a1d8e48f5b1bd1a02a8a5b184465b9604 Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Thu, 5 Apr 2018 00:19:28 +0900 Subject: [PATCH 26/86] Fix migration file and schema --- .../20180326202229_create_ci_job_trace_chunks.rb | 4 ++-- db/schema.rb | 14 ++------------ 2 files changed, 4 insertions(+), 14 deletions(-) diff --git a/db/migrate/20180326202229_create_ci_job_trace_chunks.rb b/db/migrate/20180326202229_create_ci_job_trace_chunks.rb index 70b230a79789..abfaea9f54a1 100644 --- a/db/migrate/20180326202229_create_ci_job_trace_chunks.rb +++ b/db/migrate/20180326202229_create_ci_job_trace_chunks.rb @@ -4,14 +4,14 @@ class CreateCiJobTraceChunks < ActiveRecord::Migration DOWNTIME = false def 
change
-    create_table :ci_job_trace_chunks do |t|
+    create_table :ci_job_trace_chunks, id: :bigserial do |t|
       t.integer :job_id, null: false
       t.integer :chunk_index, null: false
       t.integer :data_store, null: false
       t.text :raw_data

       t.foreign_key :ci_builds, column: :job_id, on_delete: :cascade
-      t.index [:chunk_index, :job_id], unique: true
+      t.index [:job_id, :chunk_index], unique: true
     end
   end
 end
diff --git a/db/schema.rb b/db/schema.rb
index efad5bd6b1c9..cf26c17d5f35 100644
--- a/db/schema.rb
+++ b/db/schema.rb
@@ -370,22 +370,14 @@
   add_index "ci_job_artifacts", ["job_id", "file_type"], name: "index_ci_job_artifacts_on_job_id_and_file_type", unique: true, using: :btree
   add_index "ci_job_artifacts", ["project_id"], name: "index_ci_job_artifacts_on_project_id", using: :btree

-  create_table "ci_job_trace_chunks", force: :cascade do |t|
+  create_table "ci_job_trace_chunks", id: :bigserial, force: :cascade do |t|
     t.integer "job_id", null: false
     t.integer "chunk_index", null: false
     t.integer "data_store", null: false
     t.text "raw_data"
   end

-  add_index "ci_job_trace_chunks", ["chunk_index", "job_id"], name: "index_ci_job_trace_chunks_on_chunk_index_and_job_id", unique: true, using: :btree
-
-  create_table "ci_pipeline_chat_data", id: :bigserial, force: :cascade do |t|
-    t.integer "pipeline_id", null: false
-    t.integer "chat_name_id", null: false
-    t.text "response_url", null: false
-  end
-
-  add_index "ci_pipeline_chat_data", ["pipeline_id"], name: "index_ci_pipeline_chat_data_on_pipeline_id", unique: true, using: :btree
+  add_index "ci_job_trace_chunks", ["job_id", "chunk_index"], name: "index_ci_job_trace_chunks_on_job_id_and_chunk_index", unique: true, using: :btree

   create_table "ci_pipeline_schedule_variables", force: :cascade do |t|
     t.string "key", null: false
@@ -2062,8 +2054,6 @@
   add_foreign_key "ci_job_artifacts", "ci_builds", column: "job_id", on_delete: :cascade
   add_foreign_key "ci_job_artifacts", "projects", on_delete: :cascade
   add_foreign_key "ci_job_trace_chunks", "ci_builds", column: "job_id", on_delete: :cascade
-  add_foreign_key "ci_pipeline_chat_data", "chat_names", on_delete: :cascade
-  add_foreign_key "ci_pipeline_chat_data", "ci_pipelines", column: "pipeline_id", on_delete: :cascade
   add_foreign_key "ci_pipeline_schedule_variables", "ci_pipeline_schedules", column: "pipeline_schedule_id", name: "fk_41c35fda51", on_delete: :cascade
   add_foreign_key "ci_pipeline_schedules", "projects", name: "fk_8ead60fcc4", on_delete: :cascade
   add_foreign_key "ci_pipeline_schedules", "users", column: "owner_id", name: "fk_9ea99f58d2", on_delete: :nullify
--
GitLab


From 26fec9d460163373043e56c13ae4518c50e98367 Mon Sep 17 00:00:00 2001
From: Shinya Maeda
Date: Thu, 5 Apr 2018 03:16:51 +0900
Subject: [PATCH 27/86] Fix #read to increment tell correctly

---
 app/models/ci/job_trace_chunk.rb  |  2 +-
 lib/gitlab/ci/trace/chunked_io.rb | 30 +++++++++++++++++++-----------
 2 files changed, 20 insertions(+), 12 deletions(-)

diff --git a/app/models/ci/job_trace_chunk.rb b/app/models/ci/job_trace_chunk.rb
index 85b67997d1ed..329f2fd01ae2 100644
--- a/app/models/ci/job_trace_chunk.rb
+++ b/app/models/ci/job_trace_chunk.rb
@@ -8,7 +8,7 @@ class JobTraceChunk < ActiveRecord::Base

     default_value_for :data_store, :redis

-    CHUNK_SIZE = 8
+    CHUNK_SIZE = 32.kilobytes
     CHUNK_REDIS_TTL = 1.month

     enum data_store: {
diff --git a/lib/gitlab/ci/trace/chunked_io.rb b/lib/gitlab/ci/trace/chunked_io.rb
index 9a27c8494492..46916e837f9b 100644
--- a/lib/gitlab/ci/trace/chunked_io.rb
+++ b/lib/gitlab/ci/trace/chunked_io.rb
@@ -15,11
+15,12 @@ class ChunkedIO alias_method :pos, :tell - def initialize(job) + def initialize(job, &block) @job = job @chunks_cache = [] @tell = 0 @size = job_chunks.last.try(&:end_offset).to_i + yield self if block_given? end def close @@ -73,18 +74,25 @@ def each_line end end - def read(length = nil) + def read(length = (size - tell), outbuf = "") out = "" + end_tell = [tell + length, size].min - until eof? || (length && out.length >= length) + until end_tell <= tell data = chunk_slice_from_offset break if data.empty? + data = data[0, (length % CHUNK_SIZE)] if data.bytesize + tell >= end_tell + out << data @tell += data.bytesize end - out = out[0, length] if length && out.length > length + # TODO: If we support `IO::copy_stream`, outbuf must be handled properly + # if outbuf + # outbuf.slice!(/.*/) + # outbuf << out + # end out end @@ -110,19 +118,19 @@ def readline end def write(data) - start_pos = @tell + start_pos = tell - while @tell < start_pos + data.bytesize + while tell < start_pos + data.bytesize # get slice from current offset till the end where it falls into chunk chunk_bytes = CHUNK_SIZE - chunk_offset - chunk_data = data.byteslice(@tell - start_pos, chunk_bytes) + chunk_data = data.byteslice(tell - start_pos, chunk_bytes) # append data to chunk, overwriting from that point ensure_chunk.append(chunk_data, chunk_offset) # move offsets within buffer @tell += chunk_bytes - @size = [@size, @tell].max + @size = [size, tell].max end end @@ -172,7 +180,7 @@ def chunk_slice_from_offset end end - @chunk.byteslice(chunk_offset, CHUNK_SIZE) + @chunk[chunk_offset..CHUNK_SIZE] end def chunk_offset @@ -200,7 +208,7 @@ def current_chunk end def build_chunk - @chunks_cache[chunk_index] = Ci::JobTraceChunk.new(job: job, chunk_index: chunk_index) + @chunks_cache[chunk_index] = ::Ci::JobTraceChunk.new(job: job, chunk_index: chunk_index) end def ensure_chunk @@ -208,7 +216,7 @@ def ensure_chunk end def job_chunks - Ci::JobTraceChunk.where(job: job) + ::Ci::JobTraceChunk.where(job: job) end end end -- GitLab From 8630249f444a02e1800c075fa1cb2fd1d6beccf3 Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Thu, 5 Apr 2018 15:26:57 +0900 Subject: [PATCH 28/86] Support IO.copy_stream --- lib/gitlab/ci/trace/chunked_io.rb | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/lib/gitlab/ci/trace/chunked_io.rb b/lib/gitlab/ci/trace/chunked_io.rb index 46916e837f9b..653c92358ad5 100644 --- a/lib/gitlab/ci/trace/chunked_io.rb +++ b/lib/gitlab/ci/trace/chunked_io.rb @@ -88,11 +88,11 @@ def read(length = (size - tell), outbuf = "") @tell += data.bytesize end - # TODO: If we support `IO::copy_stream`, outbuf must be handled properly - # if outbuf - # outbuf.slice!(/.*/) - # outbuf << out - # end + # If outbuf is passed, we put the output into the buffer. 
This supports IO.copy_stream functionality
+        if outbuf
+          outbuf.slice!(0, outbuf.bytesize)
+          outbuf << out
+        end

         out
       end
--
GitLab


From 908aad0a5c6843031a6ccdee69ec0eac9d832bc1 Mon Sep 17 00:00:00 2001
From: Shinya Maeda
Date: Thu, 5 Apr 2018 15:54:55 +0900
Subject: [PATCH 29/86] Revert httpio fix

---
 lib/gitlab/ci/trace/http_io.rb | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/lib/gitlab/ci/trace/http_io.rb b/lib/gitlab/ci/trace/http_io.rb
index df32693ca9f5..ac4308f4e2cb 100644
--- a/lib/gitlab/ci/trace/http_io.rb
+++ b/lib/gitlab/ci/trace/http_io.rb
@@ -161,7 +161,7 @@ def get_chunk
           @chunk_range ||= (chunk_start...(chunk_start + @chunk.length))
         end

-        @chunk.byteslice(chunk_offset, BUFFER_SIZE)
+        @chunk[chunk_offset..BUFFER_SIZE]
       end

       def request
--
GitLab


From 14ec3adfe4084887c4146f162d1a0117a93c3edc Mon Sep 17 00:00:00 2001
From: Shinya Maeda
Date: Thu, 5 Apr 2018 16:16:01 +0900
Subject: [PATCH 30/86] Use w+b mode when Trace#set

---
 lib/gitlab/ci/trace.rb | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/lib/gitlab/ci/trace.rb b/lib/gitlab/ci/trace.rb
index e20610963567..6554c924e5c3 100644
--- a/lib/gitlab/ci/trace.rb
+++ b/lib/gitlab/ci/trace.rb
@@ -36,7 +36,7 @@ def extract_sections
       end

       def set(data)
-        write do |stream|
+        write('w+b') do |stream|
           data = job.hide_secrets(data)
           stream.set(data)
         end
@@ -75,14 +75,14 @@ def read
         stream&.close
       end

-      def write
+      def write(mode = 'a+b')
         stream = Gitlab::Ci::Trace::Stream.new do
           if current_path
-            current_path
+            File.open(current_path, mode)
           elsif Feature.enabled?('ci_enable_live_trace')
             Gitlab::Ci::Trace::ChunkedIO.new(job)
           else
-            File.open(ensure_path, "a+b")
+            File.open(ensure_path, mode)
           end
         end
--
GitLab


From 54aec65e3ed3897d0f7a62bb0df1d55e7caa5023 Mon Sep 17 00:00:00 2001
From: Shinya Maeda
Date: Thu, 5 Apr 2018 16:19:32 +0900
Subject: [PATCH 31/86] Add a guard logic for appending oversized data

---
 app/models/ci/job_trace_chunk.rb | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/app/models/ci/job_trace_chunk.rb b/app/models/ci/job_trace_chunk.rb
index 329f2fd01ae2..1878e75153db 100644
--- a/app/models/ci/job_trace_chunk.rb
+++ b/app/models/ci/job_trace_chunk.rb
@@ -28,7 +28,7 @@ def data
       end

       def set_data(value)
-        raise 'too much data' if value.length > CHUNK_SIZE
+        raise 'too much data' if value.bytesize > CHUNK_SIZE

         case
         when redis?
@@ -49,7 +49,8 @@ def truncate(offset = 0)

       def append(new_data, offset)
         current_data = self.data || ""
-        raise 'Outside of if data' if offset > current_data.bytesize
+        raise 'Offset is out of bound' if offset > current_data.bytesize
+        raise 'Outside of chunk size' if CHUNK_SIZE < offset + new_data.bytesize

         self.set_data(current_data.byteslice(0, offset) + new_data)
       end
--
GitLab


From 446086f18c83dbfe12675a6f186e82cc04bac0f3 Mon Sep 17 00:00:00 2001
From: Shinya Maeda
Date: Thu, 5 Apr 2018 16:57:28 +0900
Subject: [PATCH 32/86] Add sidekiq worker for writing operation

---
 app/models/ci/job_trace_chunk.rb        |  2 +-
 app/workers/all_queues.yml              |  1 +
 app/workers/stash_trace_chunk_worker.rb | 12 ++++++++++++
 3 files changed, 14 insertions(+), 1 deletion(-)
 create mode 100644 app/workers/stash_trace_chunk_worker.rb

diff --git a/app/models/ci/job_trace_chunk.rb b/app/models/ci/job_trace_chunk.rb
index 1878e75153db..aa92bd7e3c15 100644
--- a/app/models/ci/job_trace_chunk.rb
+++ b/app/models/ci/job_trace_chunk.rb
@@ -83,7 +83,7 @@ def use_database!
     def schedule_to_db
       return if db?

-      self.use_database!
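+      # persist the filled chunk to the database in the background so the
+      # caller's write path never blocks on a database write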
+ StashTraceChunkWorker.perform_async(id) end def fullfilled? diff --git a/app/workers/all_queues.yml b/app/workers/all_queues.yml index 9a11cdb121e6..682346dd00ae 100644 --- a/app/workers/all_queues.yml +++ b/app/workers/all_queues.yml @@ -62,6 +62,7 @@ - pipeline_processing:pipeline_update - pipeline_processing:stage_update - pipeline_processing:update_head_pipeline_for_merge_request +- pipeline_processing:stash_trace_chunk - repository_check:repository_check_clear - repository_check:repository_check_single_repository diff --git a/app/workers/stash_trace_chunk_worker.rb b/app/workers/stash_trace_chunk_worker.rb new file mode 100644 index 000000000000..3056907f7680 --- /dev/null +++ b/app/workers/stash_trace_chunk_worker.rb @@ -0,0 +1,12 @@ +class StashTraceChunkWorker + include ApplicationWorker + include PipelineQueue + + queue_namespace :pipeline_processing + + def perform(job_trace_chunk_id) + Ci::JobTraceChunk.find_by(id: job_trace_chunk_id).try do |job_trace_chunk| + job_trace_chunk.use_database! + end + end +end -- GitLab From 3c6ab8997bb3b5c1b6a48bef744d5b528d05de61 Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Thu, 5 Apr 2018 17:06:07 +0900 Subject: [PATCH 33/86] Remove unrelated specs --- spec/support/chunked_io/chunked_io_helpers.rb | 32 -- .../chunked_io_shared_examples.rb | 454 ------------------ 2 files changed, 486 deletions(-) delete mode 100644 spec/support/chunked_io/chunked_io_helpers.rb delete mode 100644 spec/support/shared_examples/lib/gitlab/ci/trace/chunked_file/chunked_io_shared_examples.rb diff --git a/spec/support/chunked_io/chunked_io_helpers.rb b/spec/support/chunked_io/chunked_io_helpers.rb deleted file mode 100644 index e9dd2e775386..000000000000 --- a/spec/support/chunked_io/chunked_io_helpers.rb +++ /dev/null @@ -1,32 +0,0 @@ -module ChunkedIOHelpers - def fill_trace_to_chunks(data) - stream = described_class.new(job_id, nil, 'a+b') - stream.write(data) - stream.close - end - - def sample_trace_raw - # ChunkStore::Database doesn't support appending, so the test data size has to be least common multiple - if chunk_stores.first == Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Database - '01234567' * 32 # 256 bytes - else - File.read(expand_fixture_path('trace/sample_trace')) - end - end - - def set_smaller_buffer_size_than(file_size) - blocks = (file_size / 128) - new_size = (blocks / 2) * 128 - allow_any_instance_of(described_class).to receive(:buffer_size).and_return(new_size) - end - - def set_larger_buffer_size_than(file_size) - blocks = (file_size / 128) - new_size = (blocks * 2) * 128 - allow_any_instance_of(described_class).to receive(:buffer_size).and_return(new_size) - end - - def set_half_buffer_size_of(file_size) - allow_any_instance_of(described_class).to receive(:buffer_size).and_return(file_size / 2) - end -end diff --git a/spec/support/shared_examples/lib/gitlab/ci/trace/chunked_file/chunked_io_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/ci/trace/chunked_file/chunked_io_shared_examples.rb deleted file mode 100644 index 48828101154f..000000000000 --- a/spec/support/shared_examples/lib/gitlab/ci/trace/chunked_file/chunked_io_shared_examples.rb +++ /dev/null @@ -1,454 +0,0 @@ -shared_examples "ChunkedIO shared tests" do - around(:each, :partial_support) do |example| - example.run if chunk_stores.first == Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis - end - - describe '#new' do - context 'when mode is read' do - let(:mode) { 'rb' } - - it 'raises no exception' do - expect { described_class.new(job_id, nil, mode) }.not_to 
raise_error - expect { described_class.new(job_id, nil, mode) }.not_to raise_error - end - end - - context 'when mode is append' do - let(:mode) { 'a+b' } - - it 'raises an exception' do - expect { described_class.new(job_id, nil, mode) }.not_to raise_error - expect { described_class.new(job_id, nil, mode) }.to raise_error('Already opened by another process') - end - - context 'when closed after open' do - it 'does not raise an exception' do - expect { described_class.new(job_id, nil, mode).close }.not_to raise_error - expect { described_class.new(job_id, nil, mode) }.not_to raise_error - end - end - end - - context 'when mode is write' do - let(:mode) { 'wb' } - - it 'raises no exception' do - expect { described_class.new(job_id, nil, mode) }.to raise_error("Mode 'w' is not supported") - end - end - end - - describe 'Permissions', :partial_support do - before do - fill_trace_to_chunks(sample_trace_raw) - end - - context "when mode is 'a+b'" do - let(:mode) { 'a+b' } - - it 'can write' do - expect { described_class.new(job_id, nil, mode).write('abc') } - .not_to raise_error - end - - it 'can read' do - expect { described_class.new(job_id, nil, mode).read(10) } - .not_to raise_error - end - end - - context "when mode is 'ab'" do - let(:mode) { 'ab' } - - it 'can write' do - expect { described_class.new(job_id, nil, mode).write('abc') } - .not_to raise_error - end - - it 'can not read' do - expect { described_class.new(job_id, nil, mode).read(10) } - .to raise_error('not opened for reading') - end - end - - context "when mode is 'rb'" do - let(:mode) { 'rb' } - - it 'can not write' do - expect { described_class.new(job_id, nil, mode).write('abc') } - .to raise_error('not opened for writing') - end - - it 'can read' do - expect { described_class.new(job_id, nil, mode).read(10) } - .not_to raise_error - end - end - end - - describe '#seek' do - subject { chunked_io.seek(pos, where) } - - before do - set_smaller_buffer_size_than(sample_trace_raw.bytesize) - fill_trace_to_chunks(sample_trace_raw) - end - - context 'when moves pos to end of the file' do - let(:pos) { 0 } - let(:where) { IO::SEEK_END } - - it { is_expected.to eq(sample_trace_raw.bytesize) } - end - - context 'when moves pos to middle of the file' do - let(:pos) { sample_trace_raw.bytesize / 2 } - let(:where) { IO::SEEK_SET } - - it { is_expected.to eq(pos) } - end - - context 'when moves pos around' do - it 'matches the result' do - expect(chunked_io.seek(0)).to eq(0) - expect(chunked_io.seek(100, IO::SEEK_CUR)).to eq(100) - expect { chunked_io.seek(sample_trace_raw.bytesize + 1, IO::SEEK_CUR) } - .to raise_error('new position is outside of file') - end - end - end - - describe '#eof?' do - subject { chunked_io.eof? 
} - - before do - set_smaller_buffer_size_than(sample_trace_raw.bytesize) - fill_trace_to_chunks(sample_trace_raw) - end - - context 'when current pos is at end of the file' do - before do - chunked_io.seek(sample_trace_raw.bytesize, IO::SEEK_SET) - end - - it { is_expected.to be_truthy } - end - - context 'when current pos is not at end of the file' do - before do - chunked_io.seek(0, IO::SEEK_SET) - end - - it { is_expected.to be_falsey } - end - end - - describe '#each_line' do - let(:string_io) { StringIO.new(sample_trace_raw) } - - context 'when buffer size is smaller than file size' do - before do - set_smaller_buffer_size_than(sample_trace_raw.bytesize) - fill_trace_to_chunks(sample_trace_raw) - end - - it 'yields lines' do - expect { |b| described_class.new(job_id, nil, 'rb').each_line(&b) } - .to yield_successive_args(*string_io.each_line.to_a) - end - end - - context 'when buffer size is larger than file size', :partial_support do - before do - set_larger_buffer_size_than(sample_trace_raw.bytesize) - fill_trace_to_chunks(sample_trace_raw) - end - - it 'calls get_chunk only once' do - expect(chunk_stores.first).to receive(:open).once.and_call_original - - described_class.new(job_id, nil, 'rb').each_line { |line| } - end - end - end - - describe '#read' do - subject { described_class.new(job_id, nil, 'rb').read(length) } - - context 'when read the whole size' do - let(:length) { nil } - - shared_examples 'reads a trace' do - it do - is_expected.to eq(sample_trace_raw) - end - end - - context 'when buffer size is smaller than file size' do - before do - set_smaller_buffer_size_than(sample_trace_raw.bytesize) - fill_trace_to_chunks(sample_trace_raw) - end - - it_behaves_like 'reads a trace' - end - - context 'when buffer size is larger than file size', :partial_support do - before do - set_larger_buffer_size_than(sample_trace_raw.bytesize) - fill_trace_to_chunks(sample_trace_raw) - end - - it_behaves_like 'reads a trace' - end - - context 'when buffer size is half of file size' do - before do - set_half_buffer_size_of(sample_trace_raw.bytesize) - fill_trace_to_chunks(sample_trace_raw) - end - - it_behaves_like 'reads a trace' - end - end - - context 'when read only first 100 bytes' do - let(:length) { 100 } - - context 'when buffer size is smaller than file size' do - before do - set_smaller_buffer_size_than(sample_trace_raw.bytesize) - fill_trace_to_chunks(sample_trace_raw) - end - - it 'reads a trace' do - is_expected.to eq(sample_trace_raw[0, length]) - end - end - - context 'when buffer size is larger than file size', :partial_support do - before do - set_larger_buffer_size_than(sample_trace_raw.bytesize) - fill_trace_to_chunks(sample_trace_raw) - end - - it 'reads a trace' do - is_expected.to eq(sample_trace_raw[0, length]) - end - end - end - - context 'when tries to read oversize' do - let(:length) { sample_trace_raw.bytesize + 1000 } - - context 'when buffer size is smaller than file size' do - before do - set_smaller_buffer_size_than(sample_trace_raw.bytesize) - fill_trace_to_chunks(sample_trace_raw) - end - - it 'reads a trace' do - is_expected.to eq(sample_trace_raw) - end - end - - context 'when buffer size is larger than file size', :partial_support do - before do - set_larger_buffer_size_than(sample_trace_raw.bytesize) - fill_trace_to_chunks(sample_trace_raw) - end - - it 'reads a trace' do - is_expected.to eq(sample_trace_raw) - end - end - end - - context 'when tries to read 0 bytes' do - let(:length) { 0 } - - context 'when buffer size is smaller than file size' do - 
before do - set_smaller_buffer_size_than(sample_trace_raw.bytesize) - fill_trace_to_chunks(sample_trace_raw) - end - - it 'reads a trace' do - is_expected.to be_empty - end - end - - context 'when buffer size is larger than file size', :partial_support do - before do - set_larger_buffer_size_than(sample_trace_raw.bytesize) - fill_trace_to_chunks(sample_trace_raw) - end - - it 'reads a trace' do - is_expected.to be_empty - end - end - end - end - - describe '#readline' do - subject { chunked_io.readline } - - let(:string_io) { StringIO.new(sample_trace_raw) } - - shared_examples 'all line matching' do - it do - (0...sample_trace_raw.lines.count).each do - expect(chunked_io.readline).to eq(string_io.readline) - end - end - end - - context 'when buffer size is smaller than file size' do - before do - set_smaller_buffer_size_than(sample_trace_raw.bytesize) - fill_trace_to_chunks(sample_trace_raw) - end - - it_behaves_like 'all line matching' - end - - context 'when buffer size is larger than file size', :partial_support do - before do - set_larger_buffer_size_than(sample_trace_raw.bytesize) - fill_trace_to_chunks(sample_trace_raw) - end - - it_behaves_like 'all line matching' - end - - context 'when buffer size is half of file size' do - before do - set_half_buffer_size_of(sample_trace_raw.bytesize) - fill_trace_to_chunks(sample_trace_raw) - end - - it_behaves_like 'all line matching' - end - - context 'when pos is at middle of the file' do - before do - set_smaller_buffer_size_than(sample_trace_raw.bytesize) - fill_trace_to_chunks(sample_trace_raw) - - chunked_io.seek(chunked_io.size / 2) - string_io.seek(string_io.size / 2) - end - - it 'reads from pos' do - expect(chunked_io.readline).to eq(string_io.readline) - end - end - end - - describe '#write' do - subject { chunked_io.write(data) } - - let(:data) { sample_trace_raw } - - context 'when append mode' do - let(:mode) { 'a+b' } - - context 'when data does not exist' do - shared_examples 'writes a trace' do - it do - is_expected.to eq(data.bytesize) - - described_class.new(job_id, nil, 'rb') do |stream| - expect(stream.read).to eq(data) - expect(chunk_stores.inject(0) { |sum, store| sum + store.chunks_count(job_id) }) - .to eq(stream.send(:chunks_count)) - expect(chunk_stores.inject(0) { |sum, store| sum + store.chunks_size(job_id) }) - .to eq(data.bytesize) - end - end - end - - context 'when buffer size is smaller than file size' do - before do - set_smaller_buffer_size_than(data.bytesize) - end - - it_behaves_like 'writes a trace' - end - - context 'when buffer size is larger than file size', :partial_support do - before do - set_larger_buffer_size_than(data.bytesize) - end - - it_behaves_like 'writes a trace' - end - - context 'when buffer size is half of file size' do - before do - set_half_buffer_size_of(data.bytesize) - end - - it_behaves_like 'writes a trace' - end - - context 'when data is nil' do - let(:data) { nil } - - it 'writes a trace' do - expect { subject } .to raise_error('Could not write empty data') - end - end - end - - context 'when data already exists', :partial_support do - let(:exist_data) { 'exist data' } - let(:total_size) { exist_data.bytesize + data.bytesize } - - shared_examples 'appends a trace' do - it do - described_class.new(job_id, nil, 'a+b') do |stream| - expect(stream.write(data)).to eq(data.bytesize) - end - - described_class.new(job_id, nil, 'rb') do |stream| - expect(stream.read).to eq(exist_data + data) - expect(chunk_stores.inject(0) { |sum, store| sum + store.chunks_count(job_id) }) - .to 
eq(stream.send(:chunks_count)) - expect(chunk_stores.inject(0) { |sum, store| sum + store.chunks_size(job_id) }) - .to eq(total_size) - end - end - end - - context 'when buffer size is smaller than file size' do - before do - set_smaller_buffer_size_than(data.bytesize) - fill_trace_to_chunks(exist_data) - end - - it_behaves_like 'appends a trace' - end - - context 'when buffer size is larger than file size', :partial_support do - before do - set_larger_buffer_size_than(data.bytesize) - fill_trace_to_chunks(exist_data) - end - - it_behaves_like 'appends a trace' - end - - context 'when buffer size is half of file size' do - before do - set_half_buffer_size_of(data.bytesize) - fill_trace_to_chunks(exist_data) - end - - it_behaves_like 'appends a trace' - end - end - end - end -end -- GitLab From 9e146233d489894fac32c0d2e81bb3224b6f4ff3 Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Thu, 5 Apr 2018 18:59:07 +0900 Subject: [PATCH 34/86] Add job_trace_chunk_spec with small fixes --- app/models/ci/job_trace_chunk.rb | 5 +- spec/factories/ci/job_trace_chunks.rb | 2 + spec/models/ci/job_trace_chunk_spec.rb | 312 +++++++++++++++++++++++++ 3 files changed, 317 insertions(+), 2 deletions(-) create mode 100644 spec/models/ci/job_trace_chunk_spec.rb diff --git a/app/models/ci/job_trace_chunk.rb b/app/models/ci/job_trace_chunk.rb index aa92bd7e3c15..baa48a602f17 100644 --- a/app/models/ci/job_trace_chunk.rb +++ b/app/models/ci/job_trace_chunk.rb @@ -39,7 +39,7 @@ def set_data(value) raise 'Unsupported data store' end - save if changed? + save! if changed? schedule_to_db if fullfilled? end @@ -49,7 +49,7 @@ def truncate(offset = 0) def append(new_data, offset) current_data = self.data || "" - raise 'Offset is out of bound' if offset > current_data.bytesize + raise 'Offset is out of bound' if offset > current_data.bytesize || offset < 0 raise 'Outside of chunk size' if CHUNK_SIZE < offset + new_data.bytesize self.set_data(current_data.byteslice(0, offset) + new_data) @@ -73,6 +73,7 @@ def range def use_database! return if db? 
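+      # an empty chunk has nothing worth persisting, so leave it in Redis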
+ return unless size > 0 self.update!(raw_data: data, data_store: :db) redis_delete_data diff --git a/spec/factories/ci/job_trace_chunks.rb b/spec/factories/ci/job_trace_chunks.rb index f24e015f1861..e2cc2e77dda1 100644 --- a/spec/factories/ci/job_trace_chunks.rb +++ b/spec/factories/ci/job_trace_chunks.rb @@ -1,5 +1,7 @@ FactoryBot.define do factory :ci_job_trace_chunk, class: Ci::JobTraceChunk do job factory: :ci_build + chunk_index 0 + data_store :redis end end diff --git a/spec/models/ci/job_trace_chunk_spec.rb b/spec/models/ci/job_trace_chunk_spec.rb new file mode 100644 index 000000000000..fa316159e1ae --- /dev/null +++ b/spec/models/ci/job_trace_chunk_spec.rb @@ -0,0 +1,312 @@ +require 'spec_helper' + +describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do + set(:job) { create(:ci_build, :running) } + let(:chunk_index) { 0 } + let(:data_store) { :redis } + let(:raw_data) { nil } + let(:job_trace_chunk) do + described_class.new(job: job, chunk_index: chunk_index, data_store: data_store, raw_data: raw_data) + end + + describe '#data' do + subject { job_trace_chunk.data } + + context 'when data_store is redis' do + let(:data_store) { :redis } + + before do + job_trace_chunk.send(:redis_set_data, 'Sample data in redis') + end + + it { is_expected.to eq('Sample data in redis') } + end + + context 'when data_store is database' do + let(:data_store) { :db } + let(:raw_data) { 'Sample data in db' } + + it { is_expected.to eq('Sample data in db') } + end + + context 'when data_store is others' do + before do + job_trace_chunk.send(:write_attribute, :data_store, -1) + end + + it { expect { subject }.to raise_error('Unsupported data store') } + end + end + + describe '#set_data' do + subject { job_trace_chunk.set_data(value) } + + let(:value) { 'Sample data' } + + context 'when value bytesize is bigger than CHUNK_SIZE' do + let(:value) { 'a' * (described_class::CHUNK_SIZE + 1) } + + it { expect { subject }.to raise_error('too much data') } + end + + context 'when data_store is redis' do + let(:data_store) { :redis } + + it do + expect(job_trace_chunk.send(:redis_data)).to be_nil + + subject + + expect(job_trace_chunk.send(:redis_data)).to eq(value) + end + + context 'when fullfilled chunk size' do + let(:value) { 'a' * described_class::CHUNK_SIZE } + + it 'schedules stashing data' do + expect(StashTraceChunkWorker).to receive(:perform_async).once + + subject + end + end + end + + context 'when data_store is database' do + let(:data_store) { :db } + + it 'sets data' do + expect(job_trace_chunk.raw_data).to be_nil + + subject + + expect(job_trace_chunk.raw_data).to eq(value) + expect(job_trace_chunk.persisted?).to be_truthy + end + + context 'when raw_data is not changed' do + it 'does not execute UPDATE' do + expect(job_trace_chunk.raw_data).to be_nil + job_trace_chunk.save! 
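+          # the record is persisted while raw_data is still nil, so the first
+          # #set_data below must issue an UPDATE and the identical second one must not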
+ + # First set + expect(ActiveRecord::QueryRecorder.new { subject }.count).to be > 0 + expect(job_trace_chunk.raw_data).to eq(value) + expect(job_trace_chunk.persisted?).to be_truthy + + # Second set + job_trace_chunk.reload + expect(ActiveRecord::QueryRecorder.new { subject }.count).to be(0) + end + end + + context 'when fullfilled chunk size' do + it 'does not schedule stashing data' do + expect(StashTraceChunkWorker).not_to receive(:perform_async) + + subject + end + end + end + + context 'when data_store is others' do + before do + job_trace_chunk.send(:write_attribute, :data_store, -1) + end + + it { expect { subject }.to raise_error('Unsupported data store') } + end + end + + describe '#truncate' do + subject { job_trace_chunk.truncate(offset) } + + shared_examples_for 'truncates' do + context 'when offset is negative' do + let(:offset) { -1 } + + it { expect { subject }.to raise_error('Offset is out of bound') } + end + + context 'when offset is bigger than data size' do + let(:offset) { data.bytesize + 1 } + + it { expect { subject }.to raise_error('Offset is out of bound') } + end + + context 'when offset is 10' do + let(:offset) { 10 } + + it 'truncates' do + subject + + expect(job_trace_chunk.data).to eq(data.byteslice(0, offset)) + end + end + end + + context 'when data_store is redis' do + let(:data_store) { :redis } + let(:data) { 'Sample data in redis' } + + before do + job_trace_chunk.send(:redis_set_data, data) + end + + it_behaves_like 'truncates' + end + + context 'when data_store is database' do + let(:data_store) { :db } + let(:raw_data) { 'Sample data in db' } + let(:data) { raw_data } + + it_behaves_like 'truncates' + end + end + + describe '#append' do + subject { job_trace_chunk.append(new_data, offset) } + + let(:new_data) { 'Sample new data' } + let(:offset) { 0 } + let(:total_data) { data + new_data } + + shared_examples_for 'appends' do + context 'when offset is negative' do + let(:offset) { -1 } + + it { expect { subject }.to raise_error('Offset is out of bound') } + end + + context 'when offset is bigger than data size' do + let(:offset) { data.bytesize + 1 } + + it { expect { subject }.to raise_error('Offset is out of bound') } + end + + context 'when offset is bigger than data size' do + let(:new_data) { 'a' * (described_class::CHUNK_SIZE + 1) } + + it { expect { subject }.to raise_error('Outside of chunk size') } + end + + context 'when offset is EOF' do + let(:offset) { data.bytesize } + + it 'appends' do + subject + + expect(job_trace_chunk.data).to eq(total_data) + end + end + + context 'when offset is 10' do + let(:offset) { 10 } + + it 'appends' do + subject + + expect(job_trace_chunk.data).to eq(data.byteslice(0, offset) + new_data) + end + end + end + + context 'when data_store is redis' do + let(:data_store) { :redis } + let(:data) { 'Sample data in redis' } + + before do + job_trace_chunk.send(:redis_set_data, data) + end + + it_behaves_like 'appends' + end + + context 'when data_store is database' do + let(:data_store) { :db } + let(:raw_data) { 'Sample data in db' } + let(:data) { raw_data } + + it_behaves_like 'appends' + end + end + + describe '#size' do + subject { job_trace_chunk.size } + + context 'when data_store is redis' do + let(:data_store) { :redis } + + context 'when data exists' do + let(:data) { 'Sample data in redis' } + + before do + job_trace_chunk.send(:redis_set_data, data) + end + + it { is_expected.to eq(data.bytesize) } + end + + context 'when data exists' do + it { is_expected.to eq(0) } + end + end + + context 'when 
data_store is database' do
+      let(:data_store) { :db }
+
+      context 'when data exists' do
+        let(:raw_data) { 'Sample data in db' }
+        let(:data) { raw_data }
+
+        it { is_expected.to eq(data.bytesize) }
+      end
+
+      context 'when data does not exist' do
+        it { is_expected.to eq(0) }
+      end
+    end
+  end
+
+  describe '#use_database!' do
+    subject { job_trace_chunk.use_database! }
+
+    context 'when data_store is redis' do
+      let(:data_store) { :redis }
+
+      context 'when data exists' do
+        let(:data) { 'Sample data in redis' }
+
+        before do
+          job_trace_chunk.send(:redis_set_data, data)
+        end
+
+        it 'stashes the data' do
+          expect(job_trace_chunk.data_store).to eq('redis')
+          expect(job_trace_chunk.send(:redis_data)).to eq(data)
+          expect(job_trace_chunk.raw_data).to be_nil
+
+          subject
+
+          expect(job_trace_chunk.data_store).to eq('db')
+          expect(job_trace_chunk.send(:redis_data)).to be_nil
+          expect(job_trace_chunk.raw_data).to eq(data)
+        end
+      end
+
+      context 'when data does not exist' do
+        it 'does not call UPDATE' do
+          expect(ActiveRecord::QueryRecorder.new { subject }.count).to eq(0)
+        end
+      end
+    end
+
+    context 'when data_store is database' do
+      let(:data_store) { :db }
+
+      it 'does not call UPDATE' do
+        expect(ActiveRecord::QueryRecorder.new { subject }.count).to eq(0)
+      end
+    end
+  end
+end
--
GitLab


From 242b4afa8fd1ffd0686038ba955f8ad41dc95383 Mon Sep 17 00:00:00 2001
From: Shinya Maeda
Date: Thu, 5 Apr 2018 20:39:35 +0900
Subject: [PATCH 35/86] Changed chunk size to 128kb. Add spec.

---
 app/models/ci/job_trace_chunk.rb       | 2 +-
 spec/models/ci/job_trace_chunk_spec.rb | 6 ++++++
 2 files changed, 7 insertions(+), 1 deletion(-)

diff --git a/app/models/ci/job_trace_chunk.rb b/app/models/ci/job_trace_chunk.rb
index baa48a602f17..7d9ccd5e5add 100644
--- a/app/models/ci/job_trace_chunk.rb
+++ b/app/models/ci/job_trace_chunk.rb
@@ -8,7 +8,7 @@ class JobTraceChunk < ActiveRecord::Base

     default_value_for :data_store, :redis

-    CHUNK_SIZE = 32.kilobytes
+    CHUNK_SIZE = 128.kilobytes
     CHUNK_REDIS_TTL = 1.month

     enum data_store: {
diff --git a/spec/models/ci/job_trace_chunk_spec.rb b/spec/models/ci/job_trace_chunk_spec.rb
index fa316159e1ae..d3fbec9170b1 100644
--- a/spec/models/ci/job_trace_chunk_spec.rb
+++ b/spec/models/ci/job_trace_chunk_spec.rb
@@ -9,6 +9,12 @@
     described_class.new(job: job, chunk_index: chunk_index, data_store: data_store, raw_data: raw_data)
   end

+  describe 'CHUNK_SIZE' do
+    it 'Chunk size cannot be changed without special care' do
+      expect(described_class::CHUNK_SIZE).to eq(128.kilobytes)
+    end
+  end
+
   describe '#data' do
     subject { job_trace_chunk.data }

--
GitLab


From f8f62ea5b6e81162cfb8781730040e54993cd847 Mon Sep 17 00:00:00 2001
From: Shinya Maeda
Date: Thu, 5 Apr 2018 21:15:18 +0900
Subject: [PATCH 36/86] Rename StashTraceChunkWorker to SwapTraceChunkWorker

---
 app/models/ci/job_trace_chunk.rb              |    2 +-
 app/workers/all_queues.yml                    |    2 +-
 ...k_worker.rb => swap_trace_chunk_worker.rb} |    2 +-
 spec/fixtures/trace/sample_trace              | 3488 ++++++++++++++++-
 spec/models/ci/job_trace_chunk_spec.rb        |    4 +-
 5 files changed, 3432 insertions(+), 66 deletions(-)
 rename app/workers/{stash_trace_chunk_worker.rb => swap_trace_chunk_worker.rb} (90%)

diff --git a/app/models/ci/job_trace_chunk.rb b/app/models/ci/job_trace_chunk.rb
index 7d9ccd5e5add..c2f05dd1f039 100644
--- a/app/models/ci/job_trace_chunk.rb
+++ b/app/models/ci/job_trace_chunk.rb
@@ -84,7 +84,7 @@ def use_database!
     def schedule_to_db
       return if db?
- StashTraceChunkWorker.perform_async(id) + SwapTraceChunkWorker.perform_async(id) end def fullfilled? diff --git a/app/workers/all_queues.yml b/app/workers/all_queues.yml index 682346dd00ae..c713e67c70a0 100644 --- a/app/workers/all_queues.yml +++ b/app/workers/all_queues.yml @@ -62,7 +62,7 @@ - pipeline_processing:pipeline_update - pipeline_processing:stage_update - pipeline_processing:update_head_pipeline_for_merge_request -- pipeline_processing:stash_trace_chunk +- pipeline_processing:swap_trace_chunk - repository_check:repository_check_clear - repository_check:repository_check_single_repository diff --git a/app/workers/stash_trace_chunk_worker.rb b/app/workers/swap_trace_chunk_worker.rb similarity index 90% rename from app/workers/stash_trace_chunk_worker.rb rename to app/workers/swap_trace_chunk_worker.rb index 3056907f7680..6b30cfa2a486 100644 --- a/app/workers/stash_trace_chunk_worker.rb +++ b/app/workers/swap_trace_chunk_worker.rb @@ -1,4 +1,4 @@ -class StashTraceChunkWorker +class SwapTraceChunkWorker include ApplicationWorker include PipelineQueue diff --git a/spec/fixtures/trace/sample_trace b/spec/fixtures/trace/sample_trace index c78d2d671e43..5e927e7a147c 100644 --- a/spec/fixtures/trace/sample_trace +++ b/spec/fixtures/trace/sample_trace @@ -1,63 +1,3429 @@ -Running with gitlab-runner 10.0.2 (a9a76a50) - on ShinyaMaedas-MacBook-Pro.local (e1e5600d) -Using Docker executor with image ruby:2.1 ... -Using docker image sha256:35c04f14f9926d1c8c68927cb43f69435fda36ecbaa3ca6f92218205363a2b99 for predefined container... -Pulling docker image ruby:2.1 ... -Using docker image ruby:2.1 ID=sha256:223d1eaa9523fa64e78f5a92b701c9c11cbc507f0ff62246dbbacdae395ffea3 for build container... -Running on runner-e1e5600d-project-64-concurrent-0 via ShinyaMaedasMBP... -Fetching changes... -Removing index.html -HEAD is now at 59a8e85 Update .gitlab-ci.yml -Checking out 59a8e85d as master... +Running with gitlab-runner 10.6.0 (a3543a27) + on docker-auto-scale-com 30d62d59 +Using Docker executor with image dev.gitlab.org:5005/gitlab/gitlab-build-images:ruby-2.3.6-golang-1.9-git-2.16-chrome-63.0-node-8.x-yarn-1.2-postgresql-9.6 ... +Starting service mysql:latest ... +Pulling docker image mysql:latest ... +Using docker image sha256:5195076672a7e30525705a18f7d352c920bbd07a5ae72b30e374081fe660a011 for mysql:latest ... +Starting service redis:alpine ... +Pulling docker image redis:alpine ... +Using docker image sha256:98bd7cfc43b8ef0ff130465e3d5427c0771002c2f35a6a9b62cb2d04602bed0a for redis:alpine ... +Waiting for services to be up and running... +Pulling docker image dev.gitlab.org:5005/gitlab/gitlab-build-images:ruby-2.3.6-golang-1.9-git-2.16-chrome-63.0-node-8.x-yarn-1.2-postgresql-9.6 ... +Using docker image sha256:1b06077bb03d9d42d801b53f45701bb6a7e862ca02e1e75f30ca7fcf1270eb02 for dev.gitlab.org:5005/gitlab/gitlab-build-images:ruby-2.3.6-golang-1.9-git-2.16-chrome-63.0-node-8.x-yarn-1.2-postgresql-9.6 ... +section_start:1522927103:prepare_script Running on runner-30d62d59-project-13083-concurrent-0 via runner-30d62d59-prm-1522922015-ddc29478... +section_end:1522927104:prepare_script section_start:1522927104:get_sources Fetching changes for master with git depth set to 20... 
+Removing .gitlab_shell_secret +Removing .gitlab_workhorse_secret +Removing .yarn-cache/ +Removing builds/2018_04/ +Removing config/database.yml +Removing config/gitlab.yml +Removing config/redis.cache.yml +Removing config/redis.queues.yml +Removing config/redis.shared_state.yml +Removing config/resque.yml +Removing config/secrets.yml +Removing log/api_json.log +Removing log/application.log +Removing log/gitaly-test.log +Removing log/grpc.log +Removing log/test_json.log +Removing tmp/tests/ +Removing vendor/ruby/ +HEAD is now at b7cbff3d Add `direct_upload` setting for artifacts +From https://gitlab.com/gitlab-org/gitlab-ce + 2dbcb9cb..641bb13b master -> origin/master +Checking out 21488c74 as master... Skipping Git submodules setup -$ wget https://about.gitlab.com/ -converted 'https://about.gitlab.com/' (ANSI_X3.4-1968) -> 'https://about.gitlab.com/' (UTF-8) ---2018-04-03 06:46:12-- https://about.gitlab.com/ -Resolving about.gitlab.com (about.gitlab.com)... 40.79.82.214 -Connecting to about.gitlab.com (about.gitlab.com)|40.79.82.214|:443... connected. -HTTP request sent, awaiting response... 200 OK -Length: 69778 (68K) [text/html] -Saving to: 'index.html' - - 0K .......... .......... .......... .......... .......... 73% 195K 0s - 50K .......... ........ 100% 37.5M=0.3s - -2018-04-03 06:46:14 (265 KB/s) - 'index.html' saved [69778/69778] - -$ cat index.html - The only single product for the complete DevOps lifecycle - GitLab | GitLab

[~130 lines elided: the remainder of the about.gitlab.com index.html marketing page that the old fixture captured via the wget/cat steps above]
Job succeeded +section_end:1522927113:get_sources section_start:1522927113:restore_cache Checking cache for ruby-2.3.6-with-yarn... +Downloading cache.zip from http://runners-cache-5-internal.gitlab.com:444/runner/project/13083/ruby-2.3.6-with-yarn +Successfully extracted cache +section_end:1522927128:restore_cache section_start:1522927128:download_artifacts Downloading artifacts for retrieve-tests-metadata (61303215)... +Downloading artifacts from coordinator... ok  id=61303215 responseStatus=200 OK token=AdWPNg2R +Downloading artifacts for compile-assets (61303216)... +Downloading artifacts from coordinator... ok  id=61303216 responseStatus=200 OK token=iy2yYbq8 +Downloading artifacts for setup-test-env (61303217)... +Downloading artifacts from coordinator... ok  id=61303217 responseStatus=200 OK token=ur1g79-4 +WARNING: tmp/tests/gitlab-shell/.gitlab_shell_secret: chmod tmp/tests/gitlab-shell/.gitlab_shell_secret: no such file or directory (suppressing repeats) +section_end:1522927141:download_artifacts section_start:1522927141:build_script $ bundle --version +Bundler version 1.16.1 +$ date +Thu Apr 5 11:19:01 UTC 2018 +$ source scripts/utils.sh +$ date +Thu Apr 5 11:19:01 UTC 2018 +$ source scripts/prepare_build.sh +The Gemfile's dependencies are satisfied +Successfully installed knapsack-1.16.0 +1 gem installed +-- enable_extension("plpgsql") + -> 0.0010s +-- enable_extension("pg_trgm") + -> 0.0000s +-- create_table("abuse_reports", {:force=>:cascade}) + -> 0.0401s +-- create_table("appearances", {:force=>:cascade}) + -> 0.1035s +-- create_table("application_settings", {:force=>:cascade}) + -> 0.0871s +-- create_table("audit_events", {:force=>:cascade}) + -> 0.0539s +-- add_index("audit_events", ["entity_id", "entity_type"], {:name=>"index_audit_events_on_entity_id_and_entity_type", :using=>:btree}) + -> 0.0647s +-- create_table("award_emoji", {:force=>:cascade}) + -> 0.0134s +-- add_index("award_emoji", ["awardable_type", "awardable_id"], {:name=>"index_award_emoji_on_awardable_type_and_awardable_id", :using=>:btree}) + -> 0.0074s +-- add_index("award_emoji", ["user_id", "name"], {:name=>"index_award_emoji_on_user_id_and_name", :using=>:btree}) + -> 0.0072s +-- create_table("badges", {:force=>:cascade}) + -> 0.0122s +-- add_index("badges", ["group_id"], {:name=>"index_badges_on_group_id", :using=>:btree}) + -> 0.0086s +-- add_index("badges", ["project_id"], {:name=>"index_badges_on_project_id", :using=>:btree}) + -> 0.0069s +-- create_table("boards", {:force=>:cascade}) + -> 0.0075s +-- add_index("boards", ["group_id"], {:name=>"index_boards_on_group_id", :using=>:btree}) + -> 0.0050s +-- add_index("boards", ["project_id"], {:name=>"index_boards_on_project_id", :using=>:btree}) + -> 0.0051s +-- create_table("broadcast_messages", {:force=>:cascade}) + -> 0.0082s +-- add_index("broadcast_messages", ["starts_at", "ends_at", "id"], {:name=>"index_broadcast_messages_on_starts_at_and_ends_at_and_id", :using=>:btree}) + -> 0.0063s +-- create_table("chat_names", {:force=>:cascade}) + -> 0.0084s +-- add_index("chat_names", ["service_id", "team_id", "chat_id"], {:name=>"index_chat_names_on_service_id_and_team_id_and_chat_id", :unique=>true, :using=>:btree}) + -> 0.0088s +-- add_index("chat_names", ["user_id", "service_id"], {:name=>"index_chat_names_on_user_id_and_service_id", :unique=>true, :using=>:btree}) + -> 0.0077s +-- create_table("chat_teams", {:force=>:cascade}) + -> 0.0120s +-- add_index("chat_teams", ["namespace_id"], {:name=>"index_chat_teams_on_namespace_id", :unique=>true, 
:using=>:btree}) + -> 0.0135s +-- create_table("ci_build_trace_section_names", {:force=>:cascade}) + -> 0.0125s +-- add_index("ci_build_trace_section_names", ["project_id", "name"], {:name=>"index_ci_build_trace_section_names_on_project_id_and_name", :unique=>true, :using=>:btree}) + -> 0.0087s +-- create_table("ci_build_trace_sections", {:force=>:cascade}) + -> 0.0094s +-- add_index("ci_build_trace_sections", ["build_id", "section_name_id"], {:name=>"index_ci_build_trace_sections_on_build_id_and_section_name_id", :unique=>true, :using=>:btree}) + -> 0.0916s +-- add_index("ci_build_trace_sections", ["project_id"], {:name=>"index_ci_build_trace_sections_on_project_id", :using=>:btree}) + -> 0.0089s +-- add_index("ci_build_trace_sections", ["section_name_id"], {:name=>"index_ci_build_trace_sections_on_section_name_id", :using=>:btree}) + -> 0.0132s +-- create_table("ci_builds", {:force=>:cascade}) + -> 0.0140s +-- add_index("ci_builds", ["artifacts_expire_at"], {:name=>"index_ci_builds_on_artifacts_expire_at", :where=>"(artifacts_file <> ''::text)", :using=>:btree}) + -> 0.0325s +-- add_index("ci_builds", ["auto_canceled_by_id"], {:name=>"index_ci_builds_on_auto_canceled_by_id", :using=>:btree}) + -> 0.0081s +-- add_index("ci_builds", ["commit_id", "stage_idx", "created_at"], {:name=>"index_ci_builds_on_commit_id_and_stage_idx_and_created_at", :using=>:btree}) + -> 0.0114s +-- add_index("ci_builds", ["commit_id", "status", "type"], {:name=>"index_ci_builds_on_commit_id_and_status_and_type", :using=>:btree}) + -> 0.0119s +-- add_index("ci_builds", ["commit_id", "type", "name", "ref"], {:name=>"index_ci_builds_on_commit_id_and_type_and_name_and_ref", :using=>:btree}) + -> 0.0116s +-- add_index("ci_builds", ["commit_id", "type", "ref"], {:name=>"index_ci_builds_on_commit_id_and_type_and_ref", :using=>:btree}) + -> 0.0144s +-- add_index("ci_builds", ["project_id", "id"], {:name=>"index_ci_builds_on_project_id_and_id", :using=>:btree}) + -> 0.0136s +-- add_index("ci_builds", ["protected"], {:name=>"index_ci_builds_on_protected", :using=>:btree}) + -> 0.0113s +-- add_index("ci_builds", ["runner_id"], {:name=>"index_ci_builds_on_runner_id", :using=>:btree}) + -> 0.0082s +-- add_index("ci_builds", ["stage_id"], {:name=>"index_ci_builds_on_stage_id", :using=>:btree}) + -> 0.0086s +-- add_index("ci_builds", ["status", "type", "runner_id"], {:name=>"index_ci_builds_on_status_and_type_and_runner_id", :using=>:btree}) + -> 0.0091s +-- add_index("ci_builds", ["status"], {:name=>"index_ci_builds_on_status", :using=>:btree}) + -> 0.0081s +-- add_index("ci_builds", ["token"], {:name=>"index_ci_builds_on_token", :unique=>true, :using=>:btree}) + -> 0.0103s +-- add_index("ci_builds", ["updated_at"], {:name=>"index_ci_builds_on_updated_at", :using=>:btree}) + -> 0.0149s +-- add_index("ci_builds", ["user_id"], {:name=>"index_ci_builds_on_user_id", :using=>:btree}) + -> 0.0156s +-- create_table("ci_builds_metadata", {:force=>:cascade}) + -> 0.0134s +-- add_index("ci_builds_metadata", ["build_id"], {:name=>"index_ci_builds_metadata_on_build_id", :unique=>true, :using=>:btree}) + -> 0.0067s +-- add_index("ci_builds_metadata", ["project_id"], {:name=>"index_ci_builds_metadata_on_project_id", :using=>:btree}) + -> 0.0061s +-- create_table("ci_group_variables", {:force=>:cascade}) + -> 0.0088s +-- add_index("ci_group_variables", ["group_id", "key"], {:name=>"index_ci_group_variables_on_group_id_and_key", :unique=>true, :using=>:btree}) + -> 0.0073s +-- create_table("ci_job_artifacts", {:force=>:cascade}) + -> 0.0089s 
+-- add_index("ci_job_artifacts", ["expire_at", "job_id"], {:name=>"index_ci_job_artifacts_on_expire_at_and_job_id", :using=>:btree}) + -> 0.0061s +-- add_index("ci_job_artifacts", ["job_id", "file_type"], {:name=>"index_ci_job_artifacts_on_job_id_and_file_type", :unique=>true, :using=>:btree}) + -> 0.0077s +-- add_index("ci_job_artifacts", ["project_id"], {:name=>"index_ci_job_artifacts_on_project_id", :using=>:btree}) + -> 0.0071s +-- create_table("ci_pipeline_schedule_variables", {:force=>:cascade}) + -> 0.0512s +-- add_index("ci_pipeline_schedule_variables", ["pipeline_schedule_id", "key"], {:name=>"index_ci_pipeline_schedule_variables_on_schedule_id_and_key", :unique=>true, :using=>:btree}) + -> 0.0144s +-- create_table("ci_pipeline_schedules", {:force=>:cascade}) + -> 0.0603s +-- add_index("ci_pipeline_schedules", ["next_run_at", "active"], {:name=>"index_ci_pipeline_schedules_on_next_run_at_and_active", :using=>:btree}) + -> 0.0247s +-- add_index("ci_pipeline_schedules", ["project_id"], {:name=>"index_ci_pipeline_schedules_on_project_id", :using=>:btree}) + -> 0.0082s +-- create_table("ci_pipeline_variables", {:force=>:cascade}) + -> 0.0112s +-- add_index("ci_pipeline_variables", ["pipeline_id", "key"], {:name=>"index_ci_pipeline_variables_on_pipeline_id_and_key", :unique=>true, :using=>:btree}) + -> 0.0075s +-- create_table("ci_pipelines", {:force=>:cascade}) + -> 0.0111s +-- add_index("ci_pipelines", ["auto_canceled_by_id"], {:name=>"index_ci_pipelines_on_auto_canceled_by_id", :using=>:btree}) + -> 0.0074s +-- add_index("ci_pipelines", ["pipeline_schedule_id"], {:name=>"index_ci_pipelines_on_pipeline_schedule_id", :using=>:btree}) + -> 0.0086s +-- add_index("ci_pipelines", ["project_id", "ref", "status", "id"], {:name=>"index_ci_pipelines_on_project_id_and_ref_and_status_and_id", :using=>:btree}) + -> 0.0104s +-- add_index("ci_pipelines", ["project_id", "sha"], {:name=>"index_ci_pipelines_on_project_id_and_sha", :using=>:btree}) + -> 0.0107s +-- add_index("ci_pipelines", ["project_id"], {:name=>"index_ci_pipelines_on_project_id", :using=>:btree}) + -> 0.0084s +-- add_index("ci_pipelines", ["status"], {:name=>"index_ci_pipelines_on_status", :using=>:btree}) + -> 0.0065s +-- add_index("ci_pipelines", ["user_id"], {:name=>"index_ci_pipelines_on_user_id", :using=>:btree}) + -> 0.0071s +-- create_table("ci_runner_projects", {:force=>:cascade}) + -> 0.0077s +-- add_index("ci_runner_projects", ["project_id"], {:name=>"index_ci_runner_projects_on_project_id", :using=>:btree}) + -> 0.0072s +-- add_index("ci_runner_projects", ["runner_id"], {:name=>"index_ci_runner_projects_on_runner_id", :using=>:btree}) + -> 0.0064s +-- create_table("ci_runners", {:force=>:cascade}) + -> 0.0090s +-- add_index("ci_runners", ["contacted_at"], {:name=>"index_ci_runners_on_contacted_at", :using=>:btree}) + -> 0.0078s +-- add_index("ci_runners", ["is_shared"], {:name=>"index_ci_runners_on_is_shared", :using=>:btree}) + -> 0.0054s +-- add_index("ci_runners", ["locked"], {:name=>"index_ci_runners_on_locked", :using=>:btree}) + -> 0.0052s +-- add_index("ci_runners", ["token"], {:name=>"index_ci_runners_on_token", :using=>:btree}) + -> 0.0057s +-- create_table("ci_stages", {:force=>:cascade}) + -> 0.0059s +-- add_index("ci_stages", ["pipeline_id", "name"], {:name=>"index_ci_stages_on_pipeline_id_and_name", :unique=>true, :using=>:btree}) + -> 0.0054s +-- add_index("ci_stages", ["pipeline_id"], {:name=>"index_ci_stages_on_pipeline_id", :using=>:btree}) + -> 0.0045s +-- add_index("ci_stages", ["project_id"], 
{:name=>"index_ci_stages_on_project_id", :using=>:btree}) + -> 0.0053s +-- create_table("ci_trigger_requests", {:force=>:cascade}) + -> 0.0079s +-- add_index("ci_trigger_requests", ["commit_id"], {:name=>"index_ci_trigger_requests_on_commit_id", :using=>:btree}) + -> 0.0059s +-- create_table("ci_triggers", {:force=>:cascade}) + -> 0.0100s +-- add_index("ci_triggers", ["project_id"], {:name=>"index_ci_triggers_on_project_id", :using=>:btree}) + -> 0.0059s +-- create_table("ci_variables", {:force=>:cascade}) + -> 0.0110s +-- add_index("ci_variables", ["project_id", "key", "environment_scope"], {:name=>"index_ci_variables_on_project_id_and_key_and_environment_scope", :unique=>true, :using=>:btree}) + -> 0.0066s +-- create_table("cluster_platforms_kubernetes", {:force=>:cascade}) + -> 0.0082s +-- add_index("cluster_platforms_kubernetes", ["cluster_id"], {:name=>"index_cluster_platforms_kubernetes_on_cluster_id", :unique=>true, :using=>:btree}) + -> 0.0047s +-- create_table("cluster_projects", {:force=>:cascade}) + -> 0.0079s +-- add_index("cluster_projects", ["cluster_id"], {:name=>"index_cluster_projects_on_cluster_id", :using=>:btree}) + -> 0.0045s +-- add_index("cluster_projects", ["project_id"], {:name=>"index_cluster_projects_on_project_id", :using=>:btree}) + -> 0.0044s +-- create_table("cluster_providers_gcp", {:force=>:cascade}) + -> 0.0247s +-- add_index("cluster_providers_gcp", ["cluster_id"], {:name=>"index_cluster_providers_gcp_on_cluster_id", :unique=>true, :using=>:btree}) + -> 0.0088s +-- create_table("clusters", {:force=>:cascade}) + -> 0.0767s +-- add_index("clusters", ["enabled"], {:name=>"index_clusters_on_enabled", :using=>:btree}) + -> 0.0162s +-- add_index("clusters", ["user_id"], {:name=>"index_clusters_on_user_id", :using=>:btree}) + -> 0.0216s +-- create_table("clusters_applications_helm", {:force=>:cascade}) + -> 0.0379s +-- create_table("clusters_applications_ingress", {:force=>:cascade}) + -> 0.0409s +-- create_table("clusters_applications_prometheus", {:force=>:cascade}) + -> 0.0178s +-- create_table("clusters_applications_runners", {:force=>:cascade}) + -> 0.0471s +-- add_index("clusters_applications_runners", ["cluster_id"], {:name=>"index_clusters_applications_runners_on_cluster_id", :unique=>true, :using=>:btree}) + -> 0.0487s +-- add_index("clusters_applications_runners", ["runner_id"], {:name=>"index_clusters_applications_runners_on_runner_id", :using=>:btree}) + -> 0.0094s +-- create_table("container_repositories", {:force=>:cascade}) + -> 0.0142s +-- add_index("container_repositories", ["project_id", "name"], {:name=>"index_container_repositories_on_project_id_and_name", :unique=>true, :using=>:btree}) + -> 0.0080s +-- add_index("container_repositories", ["project_id"], {:name=>"index_container_repositories_on_project_id", :using=>:btree}) + -> 0.0070s +-- create_table("conversational_development_index_metrics", {:force=>:cascade}) + -> 0.0204s +-- create_table("deploy_keys_projects", {:force=>:cascade}) + -> 0.0154s +-- add_index("deploy_keys_projects", ["project_id"], {:name=>"index_deploy_keys_projects_on_project_id", :using=>:btree}) + -> 0.0471s +-- create_table("deployments", {:force=>:cascade}) + -> 0.0191s +-- add_index("deployments", ["created_at"], {:name=>"index_deployments_on_created_at", :using=>:btree}) + -> 0.0552s +-- add_index("deployments", ["environment_id", "id"], {:name=>"index_deployments_on_environment_id_and_id", :using=>:btree}) + -> 0.0294s +-- add_index("deployments", ["environment_id", "iid", "project_id"], 
{:name=>"index_deployments_on_environment_id_and_iid_and_project_id", :using=>:btree}) + -> 0.0408s +-- add_index("deployments", ["project_id", "iid"], {:name=>"index_deployments_on_project_id_and_iid", :unique=>true, :using=>:btree}) + -> 0.0094s +-- create_table("emails", {:force=>:cascade}) + -> 0.0127s +-- add_index("emails", ["confirmation_token"], {:name=>"index_emails_on_confirmation_token", :unique=>true, :using=>:btree}) + -> 0.0082s +-- add_index("emails", ["email"], {:name=>"index_emails_on_email", :unique=>true, :using=>:btree}) + -> 0.0110s +-- add_index("emails", ["user_id"], {:name=>"index_emails_on_user_id", :using=>:btree}) + -> 0.0079s +-- create_table("environments", {:force=>:cascade}) + -> 0.0106s +-- add_index("environments", ["project_id", "name"], {:name=>"index_environments_on_project_id_and_name", :unique=>true, :using=>:btree}) + -> 0.0086s +-- add_index("environments", ["project_id", "slug"], {:name=>"index_environments_on_project_id_and_slug", :unique=>true, :using=>:btree}) + -> 0.0076s +-- create_table("events", {:force=>:cascade}) + -> 0.0122s +-- add_index("events", ["action"], {:name=>"index_events_on_action", :using=>:btree}) + -> 0.0068s +-- add_index("events", ["author_id", "project_id"], {:name=>"index_events_on_author_id_and_project_id", :using=>:btree}) + -> 0.0081s +-- add_index("events", ["project_id", "id"], {:name=>"index_events_on_project_id_and_id", :using=>:btree}) + -> 0.0064s +-- add_index("events", ["target_type", "target_id"], {:name=>"index_events_on_target_type_and_target_id", :using=>:btree}) + -> 0.0087s +-- create_table("feature_gates", {:force=>:cascade}) + -> 0.0105s +-- add_index("feature_gates", ["feature_key", "key", "value"], {:name=>"index_feature_gates_on_feature_key_and_key_and_value", :unique=>true, :using=>:btree}) + -> 0.0080s +-- create_table("features", {:force=>:cascade}) + -> 0.0086s +-- add_index("features", ["key"], {:name=>"index_features_on_key", :unique=>true, :using=>:btree}) + -> 0.0058s +-- create_table("fork_network_members", {:force=>:cascade}) + -> 0.0081s +-- add_index("fork_network_members", ["fork_network_id"], {:name=>"index_fork_network_members_on_fork_network_id", :using=>:btree}) + -> 0.0056s +-- add_index("fork_network_members", ["project_id"], {:name=>"index_fork_network_members_on_project_id", :unique=>true, :using=>:btree}) + -> 0.0053s +-- create_table("fork_networks", {:force=>:cascade}) + -> 0.0081s +-- add_index("fork_networks", ["root_project_id"], {:name=>"index_fork_networks_on_root_project_id", :unique=>true, :using=>:btree}) + -> 0.0051s +-- create_table("forked_project_links", {:force=>:cascade}) + -> 0.0070s +-- add_index("forked_project_links", ["forked_to_project_id"], {:name=>"index_forked_project_links_on_forked_to_project_id", :unique=>true, :using=>:btree}) + -> 0.0061s +-- create_table("gcp_clusters", {:force=>:cascade}) + -> 0.0090s +-- add_index("gcp_clusters", ["project_id"], {:name=>"index_gcp_clusters_on_project_id", :unique=>true, :using=>:btree}) + -> 0.0073s +-- create_table("gpg_key_subkeys", {:force=>:cascade}) + -> 0.0092s +-- add_index("gpg_key_subkeys", ["fingerprint"], {:name=>"index_gpg_key_subkeys_on_fingerprint", :unique=>true, :using=>:btree}) + -> 0.0063s +-- add_index("gpg_key_subkeys", ["gpg_key_id"], {:name=>"index_gpg_key_subkeys_on_gpg_key_id", :using=>:btree}) + -> 0.0603s +-- add_index("gpg_key_subkeys", ["keyid"], {:name=>"index_gpg_key_subkeys_on_keyid", :unique=>true, :using=>:btree}) + -> 0.0705s +-- create_table("gpg_keys", {:force=>:cascade}) + -> 
0.0235s +-- add_index("gpg_keys", ["fingerprint"], {:name=>"index_gpg_keys_on_fingerprint", :unique=>true, :using=>:btree}) + -> 0.0220s +-- add_index("gpg_keys", ["primary_keyid"], {:name=>"index_gpg_keys_on_primary_keyid", :unique=>true, :using=>:btree}) + -> 0.0329s +-- add_index("gpg_keys", ["user_id"], {:name=>"index_gpg_keys_on_user_id", :using=>:btree}) + -> 0.0087s +-- create_table("gpg_signatures", {:force=>:cascade}) + -> 0.0126s +-- add_index("gpg_signatures", ["commit_sha"], {:name=>"index_gpg_signatures_on_commit_sha", :unique=>true, :using=>:btree}) + -> 0.0105s +-- add_index("gpg_signatures", ["gpg_key_id"], {:name=>"index_gpg_signatures_on_gpg_key_id", :using=>:btree}) + -> 0.0094s +-- add_index("gpg_signatures", ["gpg_key_primary_keyid"], {:name=>"index_gpg_signatures_on_gpg_key_primary_keyid", :using=>:btree}) + -> 0.0100s +-- add_index("gpg_signatures", ["gpg_key_subkey_id"], {:name=>"index_gpg_signatures_on_gpg_key_subkey_id", :using=>:btree}) + -> 0.0079s +-- add_index("gpg_signatures", ["project_id"], {:name=>"index_gpg_signatures_on_project_id", :using=>:btree}) + -> 0.0081s +-- create_table("group_custom_attributes", {:force=>:cascade}) + -> 0.0092s +-- add_index("group_custom_attributes", ["group_id", "key"], {:name=>"index_group_custom_attributes_on_group_id_and_key", :unique=>true, :using=>:btree}) + -> 0.0086s +-- add_index("group_custom_attributes", ["key", "value"], {:name=>"index_group_custom_attributes_on_key_and_value", :using=>:btree}) + -> 0.0071s +-- create_table("identities", {:force=>:cascade}) + -> 0.0114s +-- add_index("identities", ["user_id"], {:name=>"index_identities_on_user_id", :using=>:btree}) + -> 0.0064s +-- create_table("internal_ids", {:id=>:bigserial, :force=>:cascade}) + -> 0.0097s +-- add_index("internal_ids", ["usage", "project_id"], {:name=>"index_internal_ids_on_usage_and_project_id", :unique=>true, :using=>:btree}) + -> 0.0073s +-- create_table("issue_assignees", {:id=>false, :force=>:cascade}) + -> 0.0127s +-- add_index("issue_assignees", ["issue_id", "user_id"], {:name=>"index_issue_assignees_on_issue_id_and_user_id", :unique=>true, :using=>:btree}) + -> 0.0110s +-- add_index("issue_assignees", ["user_id"], {:name=>"index_issue_assignees_on_user_id", :using=>:btree}) + -> 0.0079s +-- create_table("issue_metrics", {:force=>:cascade}) + -> 0.0098s +-- add_index("issue_metrics", ["issue_id"], {:name=>"index_issue_metrics", :using=>:btree}) + -> 0.0053s +-- create_table("issues", {:force=>:cascade}) + -> 0.0090s +-- add_index("issues", ["author_id"], {:name=>"index_issues_on_author_id", :using=>:btree}) + -> 0.0056s +-- add_index("issues", ["confidential"], {:name=>"index_issues_on_confidential", :using=>:btree}) + -> 0.0055s +-- add_index("issues", ["description"], {:name=>"index_issues_on_description_trigram", :using=>:gin, :opclasses=>{"description"=>"gin_trgm_ops"}}) + -> 0.0006s +-- add_index("issues", ["milestone_id"], {:name=>"index_issues_on_milestone_id", :using=>:btree}) + -> 0.0061s +-- add_index("issues", ["moved_to_id"], {:name=>"index_issues_on_moved_to_id", :where=>"(moved_to_id IS NOT NULL)", :using=>:btree}) + -> 0.0051s +-- add_index("issues", ["project_id", "created_at", "id", "state"], {:name=>"index_issues_on_project_id_and_created_at_and_id_and_state", :using=>:btree}) + -> 0.0069s +-- add_index("issues", ["project_id", "due_date", "id", "state"], {:name=>"idx_issues_on_project_id_and_due_date_and_id_and_state_partial", :where=>"(due_date IS NOT NULL)", :using=>:btree}) + -> 0.0073s +-- add_index("issues", 
["project_id", "iid"], {:name=>"index_issues_on_project_id_and_iid", :unique=>true, :using=>:btree}) + -> 0.0060s +-- add_index("issues", ["project_id", "updated_at", "id", "state"], {:name=>"index_issues_on_project_id_and_updated_at_and_id_and_state", :using=>:btree}) + -> 0.0094s +-- add_index("issues", ["relative_position"], {:name=>"index_issues_on_relative_position", :using=>:btree}) + -> 0.0070s +-- add_index("issues", ["state"], {:name=>"index_issues_on_state", :using=>:btree}) + -> 0.0078s +-- add_index("issues", ["title"], {:name=>"index_issues_on_title_trigram", :using=>:gin, :opclasses=>{"title"=>"gin_trgm_ops"}}) + -> 0.0007s +-- add_index("issues", ["updated_at"], {:name=>"index_issues_on_updated_at", :using=>:btree}) + -> 0.0068s +-- add_index("issues", ["updated_by_id"], {:name=>"index_issues_on_updated_by_id", :where=>"(updated_by_id IS NOT NULL)", :using=>:btree}) + -> 0.0066s +-- create_table("keys", {:force=>:cascade}) + -> 0.0087s +-- add_index("keys", ["fingerprint"], {:name=>"index_keys_on_fingerprint", :unique=>true, :using=>:btree}) + -> 0.0066s +-- add_index("keys", ["user_id"], {:name=>"index_keys_on_user_id", :using=>:btree}) + -> 0.0063s +-- create_table("label_links", {:force=>:cascade}) + -> 0.0073s +-- add_index("label_links", ["label_id"], {:name=>"index_label_links_on_label_id", :using=>:btree}) + -> 0.0050s +-- add_index("label_links", ["target_id", "target_type"], {:name=>"index_label_links_on_target_id_and_target_type", :using=>:btree}) + -> 0.0062s +-- create_table("label_priorities", {:force=>:cascade}) + -> 0.0073s +-- add_index("label_priorities", ["priority"], {:name=>"index_label_priorities_on_priority", :using=>:btree}) + -> 0.0058s +-- add_index("label_priorities", ["project_id", "label_id"], {:name=>"index_label_priorities_on_project_id_and_label_id", :unique=>true, :using=>:btree}) + -> 0.0056s +-- create_table("labels", {:force=>:cascade}) + -> 0.0087s +-- add_index("labels", ["group_id", "project_id", "title"], {:name=>"index_labels_on_group_id_and_project_id_and_title", :unique=>true, :using=>:btree}) + -> 0.0074s +-- add_index("labels", ["project_id"], {:name=>"index_labels_on_project_id", :using=>:btree}) + -> 0.0061s +-- add_index("labels", ["template"], {:name=>"index_labels_on_template", :where=>"template", :using=>:btree}) + -> 0.0060s +-- add_index("labels", ["title"], {:name=>"index_labels_on_title", :using=>:btree}) + -> 0.0076s +-- add_index("labels", ["type", "project_id"], {:name=>"index_labels_on_type_and_project_id", :using=>:btree}) + -> 0.0061s +-- create_table("lfs_file_locks", {:force=>:cascade}) + -> 0.0078s +-- add_index("lfs_file_locks", ["project_id", "path"], {:name=>"index_lfs_file_locks_on_project_id_and_path", :unique=>true, :using=>:btree}) + -> 0.0067s +-- add_index("lfs_file_locks", ["user_id"], {:name=>"index_lfs_file_locks_on_user_id", :using=>:btree}) + -> 0.0060s +-- create_table("lfs_objects", {:force=>:cascade}) + -> 0.0109s +-- add_index("lfs_objects", ["oid"], {:name=>"index_lfs_objects_on_oid", :unique=>true, :using=>:btree}) + -> 0.0059s +-- create_table("lfs_objects_projects", {:force=>:cascade}) + -> 0.0091s +-- add_index("lfs_objects_projects", ["project_id"], {:name=>"index_lfs_objects_projects_on_project_id", :using=>:btree}) + -> 0.0060s +-- create_table("lists", {:force=>:cascade}) + -> 0.0115s +-- add_index("lists", ["board_id", "label_id"], {:name=>"index_lists_on_board_id_and_label_id", :unique=>true, :using=>:btree}) + -> 0.0055s +-- add_index("lists", ["label_id"], 
{:name=>"index_lists_on_label_id", :using=>:btree}) + -> 0.0055s +-- create_table("members", {:force=>:cascade}) + -> 0.0140s +-- add_index("members", ["access_level"], {:name=>"index_members_on_access_level", :using=>:btree}) + -> 0.0067s +-- add_index("members", ["invite_token"], {:name=>"index_members_on_invite_token", :unique=>true, :using=>:btree}) + -> 0.0069s +-- add_index("members", ["requested_at"], {:name=>"index_members_on_requested_at", :using=>:btree}) + -> 0.0057s +-- add_index("members", ["source_id", "source_type"], {:name=>"index_members_on_source_id_and_source_type", :using=>:btree}) + -> 0.0057s +-- add_index("members", ["user_id"], {:name=>"index_members_on_user_id", :using=>:btree}) + -> 0.0073s +-- create_table("merge_request_diff_commits", {:id=>false, :force=>:cascade}) + -> 0.0087s +-- add_index("merge_request_diff_commits", ["merge_request_diff_id", "relative_order"], {:name=>"index_merge_request_diff_commits_on_mr_diff_id_and_order", :unique=>true, :using=>:btree}) + -> 0.0151s +-- add_index("merge_request_diff_commits", ["sha"], {:name=>"index_merge_request_diff_commits_on_sha", :using=>:btree}) + -> 0.0057s +-- create_table("merge_request_diff_files", {:id=>false, :force=>:cascade}) + -> 0.0094s +-- add_index("merge_request_diff_files", ["merge_request_diff_id", "relative_order"], {:name=>"index_merge_request_diff_files_on_mr_diff_id_and_order", :unique=>true, :using=>:btree}) + -> 0.0138s +-- create_table("merge_request_diffs", {:force=>:cascade}) + -> 0.0077s +-- add_index("merge_request_diffs", ["merge_request_id", "id"], {:name=>"index_merge_request_diffs_on_merge_request_id_and_id", :using=>:btree}) + -> 0.0060s +-- create_table("merge_request_metrics", {:force=>:cascade}) + -> 0.0098s +-- add_index("merge_request_metrics", ["first_deployed_to_production_at"], {:name=>"index_merge_request_metrics_on_first_deployed_to_production_at", :using=>:btree}) + -> 0.0060s +-- add_index("merge_request_metrics", ["merge_request_id"], {:name=>"index_merge_request_metrics", :using=>:btree}) + -> 0.0050s +-- add_index("merge_request_metrics", ["pipeline_id"], {:name=>"index_merge_request_metrics_on_pipeline_id", :using=>:btree}) + -> 0.0045s +-- create_table("merge_requests", {:force=>:cascade}) + -> 0.0066s +-- add_index("merge_requests", ["assignee_id"], {:name=>"index_merge_requests_on_assignee_id", :using=>:btree}) + -> 0.0072s +-- add_index("merge_requests", ["author_id"], {:name=>"index_merge_requests_on_author_id", :using=>:btree}) + -> 0.0050s +-- add_index("merge_requests", ["created_at"], {:name=>"index_merge_requests_on_created_at", :using=>:btree}) + -> 0.0053s +-- add_index("merge_requests", ["description"], {:name=>"index_merge_requests_on_description_trigram", :using=>:gin, :opclasses=>{"description"=>"gin_trgm_ops"}}) + -> 0.0008s +-- add_index("merge_requests", ["head_pipeline_id"], {:name=>"index_merge_requests_on_head_pipeline_id", :using=>:btree}) + -> 0.0053s +-- add_index("merge_requests", ["latest_merge_request_diff_id"], {:name=>"index_merge_requests_on_latest_merge_request_diff_id", :using=>:btree}) + -> 0.0048s +-- add_index("merge_requests", ["merge_user_id"], {:name=>"index_merge_requests_on_merge_user_id", :where=>"(merge_user_id IS NOT NULL)", :using=>:btree}) + -> 0.0051s +-- add_index("merge_requests", ["milestone_id"], {:name=>"index_merge_requests_on_milestone_id", :using=>:btree}) + -> 0.0055s +-- add_index("merge_requests", ["source_branch"], {:name=>"index_merge_requests_on_source_branch", :using=>:btree}) + -> 0.0055s +-- 
add_index("merge_requests", ["source_project_id", "source_branch"], {:name=>"index_merge_requests_on_source_project_and_branch_state_opened", :where=>"((state)::text = 'opened'::text)", :using=>:btree}) + -> 0.0061s +-- add_index("merge_requests", ["source_project_id", "source_branch"], {:name=>"index_merge_requests_on_source_project_id_and_source_branch", :using=>:btree}) + -> 0.0068s +-- add_index("merge_requests", ["target_branch"], {:name=>"index_merge_requests_on_target_branch", :using=>:btree}) + -> 0.0054s +-- add_index("merge_requests", ["target_project_id", "iid"], {:name=>"index_merge_requests_on_target_project_id_and_iid", :unique=>true, :using=>:btree}) + -> 0.0061s +-- add_index("merge_requests", ["target_project_id", "merge_commit_sha", "id"], {:name=>"index_merge_requests_on_tp_id_and_merge_commit_sha_and_id", :using=>:btree}) + -> 0.0077s +-- add_index("merge_requests", ["title"], {:name=>"index_merge_requests_on_title", :using=>:btree}) + -> 0.0105s +-- add_index("merge_requests", ["title"], {:name=>"index_merge_requests_on_title_trigram", :using=>:gin, :opclasses=>{"title"=>"gin_trgm_ops"}}) + -> 0.0008s +-- add_index("merge_requests", ["updated_by_id"], {:name=>"index_merge_requests_on_updated_by_id", :where=>"(updated_by_id IS NOT NULL)", :using=>:btree}) + -> 0.0074s +-- create_table("merge_requests_closing_issues", {:force=>:cascade}) + -> 0.0125s +-- add_index("merge_requests_closing_issues", ["issue_id"], {:name=>"index_merge_requests_closing_issues_on_issue_id", :using=>:btree}) + -> 0.0064s +-- add_index("merge_requests_closing_issues", ["merge_request_id"], {:name=>"index_merge_requests_closing_issues_on_merge_request_id", :using=>:btree}) + -> 0.0061s +-- create_table("milestones", {:force=>:cascade}) + -> 0.0064s +-- add_index("milestones", ["description"], {:name=>"index_milestones_on_description_trigram", :using=>:gin, :opclasses=>{"description"=>"gin_trgm_ops"}}) + -> 0.0007s +-- add_index("milestones", ["due_date"], {:name=>"index_milestones_on_due_date", :using=>:btree}) + -> 0.0053s +-- add_index("milestones", ["group_id"], {:name=>"index_milestones_on_group_id", :using=>:btree}) + -> 0.0068s +-- add_index("milestones", ["project_id", "iid"], {:name=>"index_milestones_on_project_id_and_iid", :unique=>true, :using=>:btree}) + -> 0.0057s +-- add_index("milestones", ["title"], {:name=>"index_milestones_on_title", :using=>:btree}) + -> 0.0051s +-- add_index("milestones", ["title"], {:name=>"index_milestones_on_title_trigram", :using=>:gin, :opclasses=>{"title"=>"gin_trgm_ops"}}) + -> 0.0006s +-- create_table("namespaces", {:force=>:cascade}) + -> 0.0083s +-- add_index("namespaces", ["created_at"], {:name=>"index_namespaces_on_created_at", :using=>:btree}) + -> 0.0061s +-- add_index("namespaces", ["name", "parent_id"], {:name=>"index_namespaces_on_name_and_parent_id", :unique=>true, :using=>:btree}) + -> 0.0062s +-- add_index("namespaces", ["name"], {:name=>"index_namespaces_on_name_trigram", :using=>:gin, :opclasses=>{"name"=>"gin_trgm_ops"}}) + -> 0.0006s +-- add_index("namespaces", ["owner_id"], {:name=>"index_namespaces_on_owner_id", :using=>:btree}) + -> 0.0061s +-- add_index("namespaces", ["parent_id", "id"], {:name=>"index_namespaces_on_parent_id_and_id", :unique=>true, :using=>:btree}) + -> 0.0072s +-- add_index("namespaces", ["path"], {:name=>"index_namespaces_on_path", :using=>:btree}) + -> 0.0056s +-- add_index("namespaces", ["path"], {:name=>"index_namespaces_on_path_trigram", :using=>:gin, :opclasses=>{"path"=>"gin_trgm_ops"}}) + -> 0.0006s +-- 
add_index("namespaces", ["require_two_factor_authentication"], {:name=>"index_namespaces_on_require_two_factor_authentication", :using=>:btree}) + -> 0.0061s +-- add_index("namespaces", ["type"], {:name=>"index_namespaces_on_type", :using=>:btree}) + -> 0.0055s +-- create_table("notes", {:force=>:cascade}) + -> 0.0092s +-- add_index("notes", ["author_id"], {:name=>"index_notes_on_author_id", :using=>:btree}) + -> 0.0072s +-- add_index("notes", ["commit_id"], {:name=>"index_notes_on_commit_id", :using=>:btree}) + -> 0.0057s +-- add_index("notes", ["created_at"], {:name=>"index_notes_on_created_at", :using=>:btree}) + -> 0.0065s +-- add_index("notes", ["discussion_id"], {:name=>"index_notes_on_discussion_id", :using=>:btree}) + -> 0.0064s +-- add_index("notes", ["line_code"], {:name=>"index_notes_on_line_code", :using=>:btree}) + -> 0.0078s +-- add_index("notes", ["note"], {:name=>"index_notes_on_note_trigram", :using=>:gin, :opclasses=>{"note"=>"gin_trgm_ops"}}) + -> 0.0006s +-- add_index("notes", ["noteable_id", "noteable_type"], {:name=>"index_notes_on_noteable_id_and_noteable_type", :using=>:btree}) + -> 0.0102s +-- add_index("notes", ["noteable_type"], {:name=>"index_notes_on_noteable_type", :using=>:btree}) + -> 0.0092s +-- add_index("notes", ["project_id", "noteable_type"], {:name=>"index_notes_on_project_id_and_noteable_type", :using=>:btree}) + -> 0.0082s +-- add_index("notes", ["updated_at"], {:name=>"index_notes_on_updated_at", :using=>:btree}) + -> 0.0062s +-- create_table("notification_settings", {:force=>:cascade}) + -> 0.0088s +-- add_index("notification_settings", ["source_id", "source_type"], {:name=>"index_notification_settings_on_source_id_and_source_type", :using=>:btree}) + -> 0.0405s +-- add_index("notification_settings", ["user_id", "source_id", "source_type"], {:name=>"index_notifications_on_user_id_and_source_id_and_source_type", :unique=>true, :using=>:btree}) + -> 0.0677s +-- add_index("notification_settings", ["user_id"], {:name=>"index_notification_settings_on_user_id", :using=>:btree}) + -> 0.1199s +-- create_table("oauth_access_grants", {:force=>:cascade}) + -> 0.0140s +-- add_index("oauth_access_grants", ["token"], {:name=>"index_oauth_access_grants_on_token", :unique=>true, :using=>:btree}) + -> 0.0076s +-- create_table("oauth_access_tokens", {:force=>:cascade}) + -> 0.0167s +-- add_index("oauth_access_tokens", ["refresh_token"], {:name=>"index_oauth_access_tokens_on_refresh_token", :unique=>true, :using=>:btree}) + -> 0.0098s +-- add_index("oauth_access_tokens", ["resource_owner_id"], {:name=>"index_oauth_access_tokens_on_resource_owner_id", :using=>:btree}) + -> 0.0074s +-- add_index("oauth_access_tokens", ["token"], {:name=>"index_oauth_access_tokens_on_token", :unique=>true, :using=>:btree}) + -> 0.0078s +-- create_table("oauth_applications", {:force=>:cascade}) + -> 0.0112s +-- add_index("oauth_applications", ["owner_id", "owner_type"], {:name=>"index_oauth_applications_on_owner_id_and_owner_type", :using=>:btree}) + -> 0.0079s +-- add_index("oauth_applications", ["uid"], {:name=>"index_oauth_applications_on_uid", :unique=>true, :using=>:btree}) + -> 0.0114s +-- create_table("oauth_openid_requests", {:force=>:cascade}) + -> 0.0102s +-- create_table("pages_domains", {:force=>:cascade}) + -> 0.0102s +-- add_index("pages_domains", ["domain"], {:name=>"index_pages_domains_on_domain", :unique=>true, :using=>:btree}) + -> 0.0067s +-- add_index("pages_domains", ["project_id", "enabled_until"], {:name=>"index_pages_domains_on_project_id_and_enabled_until", 
:using=>:btree}) + -> 0.0114s +-- add_index("pages_domains", ["project_id"], {:name=>"index_pages_domains_on_project_id", :using=>:btree}) + -> 0.0066s +-- add_index("pages_domains", ["verified_at", "enabled_until"], {:name=>"index_pages_domains_on_verified_at_and_enabled_until", :using=>:btree}) + -> 0.0073s +-- add_index("pages_domains", ["verified_at"], {:name=>"index_pages_domains_on_verified_at", :using=>:btree}) + -> 0.0063s +-- create_table("personal_access_tokens", {:force=>:cascade}) + -> 0.0084s +-- add_index("personal_access_tokens", ["token"], {:name=>"index_personal_access_tokens_on_token", :unique=>true, :using=>:btree}) + -> 0.0075s +-- add_index("personal_access_tokens", ["user_id"], {:name=>"index_personal_access_tokens_on_user_id", :using=>:btree}) + -> 0.0066s +-- create_table("project_authorizations", {:id=>false, :force=>:cascade}) + -> 0.0087s +-- add_index("project_authorizations", ["project_id"], {:name=>"index_project_authorizations_on_project_id", :using=>:btree}) + -> 0.0056s +-- add_index("project_authorizations", ["user_id", "project_id", "access_level"], {:name=>"index_project_authorizations_on_user_id_project_id_access_level", :unique=>true, :using=>:btree}) + -> 0.0075s +-- create_table("project_auto_devops", {:force=>:cascade}) + -> 0.0079s +-- add_index("project_auto_devops", ["project_id"], {:name=>"index_project_auto_devops_on_project_id", :unique=>true, :using=>:btree}) + -> 0.0067s +-- create_table("project_custom_attributes", {:force=>:cascade}) + -> 0.0071s +-- add_index("project_custom_attributes", ["key", "value"], {:name=>"index_project_custom_attributes_on_key_and_value", :using=>:btree}) + -> 0.0060s +-- add_index("project_custom_attributes", ["project_id", "key"], {:name=>"index_project_custom_attributes_on_project_id_and_key", :unique=>true, :using=>:btree}) + -> 0.0069s +-- create_table("project_features", {:force=>:cascade}) + -> 0.0100s +-- add_index("project_features", ["project_id"], {:name=>"index_project_features_on_project_id", :using=>:btree}) + -> 0.0069s +-- create_table("project_group_links", {:force=>:cascade}) + -> 0.0117s +-- add_index("project_group_links", ["group_id"], {:name=>"index_project_group_links_on_group_id", :using=>:btree}) + -> 0.0121s +-- add_index("project_group_links", ["project_id"], {:name=>"index_project_group_links_on_project_id", :using=>:btree}) + -> 0.0076s +-- create_table("project_import_data", {:force=>:cascade}) + -> 0.0084s +-- add_index("project_import_data", ["project_id"], {:name=>"index_project_import_data_on_project_id", :using=>:btree}) + -> 0.0058s +-- create_table("project_statistics", {:force=>:cascade}) + -> 0.0075s +-- add_index("project_statistics", ["namespace_id"], {:name=>"index_project_statistics_on_namespace_id", :using=>:btree}) + -> 0.0054s +-- add_index("project_statistics", ["project_id"], {:name=>"index_project_statistics_on_project_id", :unique=>true, :using=>:btree}) + -> 0.0054s +-- create_table("projects", {:force=>:cascade}) + -> 0.0077s +-- add_index("projects", ["ci_id"], {:name=>"index_projects_on_ci_id", :using=>:btree}) + -> 0.0070s +-- add_index("projects", ["created_at"], {:name=>"index_projects_on_created_at", :using=>:btree}) + -> 0.0060s +-- add_index("projects", ["creator_id"], {:name=>"index_projects_on_creator_id", :using=>:btree}) + -> 0.0071s +-- add_index("projects", ["description"], {:name=>"index_projects_on_description_trigram", :using=>:gin, :opclasses=>{"description"=>"gin_trgm_ops"}}) + -> 0.0009s +-- add_index("projects", ["id"], 
{:name=>"index_projects_on_id_partial_for_visibility", :unique=>true, :where=>"(visibility_level = ANY (ARRAY[10, 20]))", :using=>:btree}) + -> 0.0062s +-- add_index("projects", ["last_activity_at"], {:name=>"index_projects_on_last_activity_at", :using=>:btree}) + -> 0.0060s +-- add_index("projects", ["last_repository_check_failed"], {:name=>"index_projects_on_last_repository_check_failed", :using=>:btree}) + -> 0.0063s +-- add_index("projects", ["last_repository_updated_at"], {:name=>"index_projects_on_last_repository_updated_at", :using=>:btree}) + -> 0.0633s +-- add_index("projects", ["name"], {:name=>"index_projects_on_name_trigram", :using=>:gin, :opclasses=>{"name"=>"gin_trgm_ops"}}) + -> 0.0012s +-- add_index("projects", ["namespace_id"], {:name=>"index_projects_on_namespace_id", :using=>:btree}) + -> 0.0167s +-- add_index("projects", ["path"], {:name=>"index_projects_on_path", :using=>:btree}) + -> 0.0222s +-- add_index("projects", ["path"], {:name=>"index_projects_on_path_trigram", :using=>:gin, :opclasses=>{"path"=>"gin_trgm_ops"}}) + -> 0.0010s +-- add_index("projects", ["pending_delete"], {:name=>"index_projects_on_pending_delete", :using=>:btree}) + -> 0.0229s +-- add_index("projects", ["repository_storage"], {:name=>"index_projects_on_repository_storage", :using=>:btree}) + -> 0.0173s +-- add_index("projects", ["runners_token"], {:name=>"index_projects_on_runners_token", :using=>:btree}) + -> 0.0167s +-- add_index("projects", ["star_count"], {:name=>"index_projects_on_star_count", :using=>:btree}) + -> 0.0491s +-- add_index("projects", ["visibility_level"], {:name=>"index_projects_on_visibility_level", :using=>:btree}) + -> 0.0598s +-- create_table("protected_branch_merge_access_levels", {:force=>:cascade}) + -> 0.1964s +-- add_index("protected_branch_merge_access_levels", ["protected_branch_id"], {:name=>"index_protected_branch_merge_access", :using=>:btree}) + -> 0.1112s +-- create_table("protected_branch_push_access_levels", {:force=>:cascade}) + -> 0.0195s +-- add_index("protected_branch_push_access_levels", ["protected_branch_id"], {:name=>"index_protected_branch_push_access", :using=>:btree}) + -> 0.0069s +-- create_table("protected_branches", {:force=>:cascade}) + -> 0.0113s +-- add_index("protected_branches", ["project_id"], {:name=>"index_protected_branches_on_project_id", :using=>:btree}) + -> 0.0071s +-- create_table("protected_tag_create_access_levels", {:force=>:cascade}) + -> 0.0180s +-- add_index("protected_tag_create_access_levels", ["protected_tag_id"], {:name=>"index_protected_tag_create_access", :using=>:btree}) + -> 0.0068s +-- add_index("protected_tag_create_access_levels", ["user_id"], {:name=>"index_protected_tag_create_access_levels_on_user_id", :using=>:btree}) + -> 0.0077s +-- create_table("protected_tags", {:force=>:cascade}) + -> 0.0115s +-- add_index("protected_tags", ["project_id"], {:name=>"index_protected_tags_on_project_id", :using=>:btree}) + -> 0.0081s +-- create_table("push_event_payloads", {:id=>false, :force=>:cascade}) + -> 0.0108s +-- add_index("push_event_payloads", ["event_id"], {:name=>"index_push_event_payloads_on_event_id", :unique=>true, :using=>:btree}) + -> 0.0189s +-- create_table("redirect_routes", {:force=>:cascade}) + -> 0.0106s +-- add_index("redirect_routes", ["path"], {:name=>"index_redirect_routes_on_path", :unique=>true, :using=>:btree}) + -> 0.0075s +-- add_index("redirect_routes", ["source_type", "source_id"], {:name=>"index_redirect_routes_on_source_type_and_source_id", :using=>:btree}) + -> 0.0099s +-- 
create_table("releases", {:force=>:cascade}) + -> 0.0126s +-- add_index("releases", ["project_id", "tag"], {:name=>"index_releases_on_project_id_and_tag", :using=>:btree}) + -> 0.0066s +-- add_index("releases", ["project_id"], {:name=>"index_releases_on_project_id", :using=>:btree}) + -> 0.0060s +-- create_table("routes", {:force=>:cascade}) + -> 0.0091s +-- add_index("routes", ["path"], {:name=>"index_routes_on_path", :unique=>true, :using=>:btree}) + -> 0.0073s +-- add_index("routes", ["path"], {:name=>"index_routes_on_path_text_pattern_ops", :using=>:btree, :opclasses=>{"path"=>"varchar_pattern_ops"}}) + -> 0.0004s +-- add_index("routes", ["source_type", "source_id"], {:name=>"index_routes_on_source_type_and_source_id", :unique=>true, :using=>:btree}) + -> 0.0111s +-- create_table("sent_notifications", {:force=>:cascade}) + -> 0.0093s +-- add_index("sent_notifications", ["reply_key"], {:name=>"index_sent_notifications_on_reply_key", :unique=>true, :using=>:btree}) + -> 0.0060s +-- create_table("services", {:force=>:cascade}) + -> 0.0099s +-- add_index("services", ["project_id"], {:name=>"index_services_on_project_id", :using=>:btree}) + -> 0.0068s +-- add_index("services", ["template"], {:name=>"index_services_on_template", :using=>:btree}) + -> 0.0076s +-- create_table("snippets", {:force=>:cascade}) + -> 0.0073s +-- add_index("snippets", ["author_id"], {:name=>"index_snippets_on_author_id", :using=>:btree}) + -> 0.0055s +-- add_index("snippets", ["file_name"], {:name=>"index_snippets_on_file_name_trigram", :using=>:gin, :opclasses=>{"file_name"=>"gin_trgm_ops"}}) + -> 0.0006s +-- add_index("snippets", ["project_id"], {:name=>"index_snippets_on_project_id", :using=>:btree}) + -> 0.0058s +-- add_index("snippets", ["title"], {:name=>"index_snippets_on_title_trigram", :using=>:gin, :opclasses=>{"title"=>"gin_trgm_ops"}}) + -> 0.0005s +-- add_index("snippets", ["updated_at"], {:name=>"index_snippets_on_updated_at", :using=>:btree}) + -> 0.0100s +-- add_index("snippets", ["visibility_level"], {:name=>"index_snippets_on_visibility_level", :using=>:btree}) + -> 0.0091s +-- create_table("spam_logs", {:force=>:cascade}) + -> 0.0129s +-- create_table("subscriptions", {:force=>:cascade}) + -> 0.0094s +-- add_index("subscriptions", ["subscribable_id", "subscribable_type", "user_id", "project_id"], {:name=>"index_subscriptions_on_subscribable_and_user_id_and_project_id", :unique=>true, :using=>:btree}) + -> 0.0107s +-- create_table("system_note_metadata", {:force=>:cascade}) + -> 0.0138s +-- add_index("system_note_metadata", ["note_id"], {:name=>"index_system_note_metadata_on_note_id", :unique=>true, :using=>:btree}) + -> 0.0060s +-- create_table("taggings", {:force=>:cascade}) + -> 0.0121s +-- add_index("taggings", ["tag_id", "taggable_id", "taggable_type", "context", "tagger_id", "tagger_type"], {:name=>"taggings_idx", :unique=>true, :using=>:btree}) + -> 0.0078s +-- add_index("taggings", ["tag_id"], {:name=>"index_taggings_on_tag_id", :using=>:btree}) + -> 0.0058s +-- add_index("taggings", ["taggable_id", "taggable_type", "context"], {:name=>"index_taggings_on_taggable_id_and_taggable_type_and_context", :using=>:btree}) + -> 0.0059s +-- add_index("taggings", ["taggable_id", "taggable_type"], {:name=>"index_taggings_on_taggable_id_and_taggable_type", :using=>:btree}) + -> 0.0056s +-- create_table("tags", {:force=>:cascade}) + -> 0.0063s +-- add_index("tags", ["name"], {:name=>"index_tags_on_name", :unique=>true, :using=>:btree}) + -> 0.0055s +-- create_table("timelogs", {:force=>:cascade}) + -> 
0.0061s +-- add_index("timelogs", ["issue_id"], {:name=>"index_timelogs_on_issue_id", :using=>:btree}) + -> 0.0063s +-- add_index("timelogs", ["merge_request_id"], {:name=>"index_timelogs_on_merge_request_id", :using=>:btree}) + -> 0.0052s +-- add_index("timelogs", ["user_id"], {:name=>"index_timelogs_on_user_id", :using=>:btree}) + -> 0.0055s +-- create_table("todos", {:force=>:cascade}) + -> 0.0065s +-- add_index("todos", ["author_id"], {:name=>"index_todos_on_author_id", :using=>:btree}) + -> 0.0081s +-- add_index("todos", ["commit_id"], {:name=>"index_todos_on_commit_id", :using=>:btree}) + -> 0.0085s +-- add_index("todos", ["note_id"], {:name=>"index_todos_on_note_id", :using=>:btree}) + -> 0.0083s +-- add_index("todos", ["project_id"], {:name=>"index_todos_on_project_id", :using=>:btree}) + -> 0.0094s +-- add_index("todos", ["target_type", "target_id"], {:name=>"index_todos_on_target_type_and_target_id", :using=>:btree}) + -> 0.0070s +-- add_index("todos", ["user_id", "id"], {:name=>"index_todos_on_user_id_and_id_done", :where=>"((state)::text = 'done'::text)", :using=>:btree}) + -> 0.0099s +-- add_index("todos", ["user_id", "id"], {:name=>"index_todos_on_user_id_and_id_pending", :where=>"((state)::text = 'pending'::text)", :using=>:btree}) + -> 0.0080s +-- add_index("todos", ["user_id"], {:name=>"index_todos_on_user_id", :using=>:btree}) + -> 0.0061s +-- create_table("trending_projects", {:force=>:cascade}) + -> 0.0081s +-- add_index("trending_projects", ["project_id"], {:name=>"index_trending_projects_on_project_id", :unique=>true, :using=>:btree}) + -> 0.0046s +-- create_table("u2f_registrations", {:force=>:cascade}) + -> 0.0063s +-- add_index("u2f_registrations", ["key_handle"], {:name=>"index_u2f_registrations_on_key_handle", :using=>:btree}) + -> 0.0052s +-- add_index("u2f_registrations", ["user_id"], {:name=>"index_u2f_registrations_on_user_id", :using=>:btree}) + -> 0.0072s +-- create_table("uploads", {:force=>:cascade}) + -> 0.0067s +-- add_index("uploads", ["checksum"], {:name=>"index_uploads_on_checksum", :using=>:btree}) + -> 0.0046s +-- add_index("uploads", ["model_id", "model_type"], {:name=>"index_uploads_on_model_id_and_model_type", :using=>:btree}) + -> 0.0049s +-- add_index("uploads", ["uploader", "path"], {:name=>"index_uploads_on_uploader_and_path", :using=>:btree}) + -> 0.0052s +-- create_table("user_agent_details", {:force=>:cascade}) + -> 0.0059s +-- add_index("user_agent_details", ["subject_id", "subject_type"], {:name=>"index_user_agent_details_on_subject_id_and_subject_type", :using=>:btree}) + -> 0.0052s +-- create_table("user_callouts", {:force=>:cascade}) + -> 0.0059s +-- add_index("user_callouts", ["user_id", "feature_name"], {:name=>"index_user_callouts_on_user_id_and_feature_name", :unique=>true, :using=>:btree}) + -> 0.0094s +-- add_index("user_callouts", ["user_id"], {:name=>"index_user_callouts_on_user_id", :using=>:btree}) + -> 0.0064s +-- create_table("user_custom_attributes", {:force=>:cascade}) + -> 0.0086s +-- add_index("user_custom_attributes", ["key", "value"], {:name=>"index_user_custom_attributes_on_key_and_value", :using=>:btree}) + -> 0.0080s +-- add_index("user_custom_attributes", ["user_id", "key"], {:name=>"index_user_custom_attributes_on_user_id_and_key", :unique=>true, :using=>:btree}) + -> 0.0066s +-- create_table("user_interacted_projects", {:id=>false, :force=>:cascade}) + -> 0.0108s +-- add_index("user_interacted_projects", ["project_id", "user_id"], {:name=>"index_user_interacted_projects_on_project_id_and_user_id", 
:unique=>true, :using=>:btree}) + -> 0.0114s +-- add_index("user_interacted_projects", ["user_id"], {:name=>"index_user_interacted_projects_on_user_id", :using=>:btree}) + -> 0.0056s +-- create_table("user_synced_attributes_metadata", {:force=>:cascade}) + -> 0.0115s +-- add_index("user_synced_attributes_metadata", ["user_id"], {:name=>"index_user_synced_attributes_metadata_on_user_id", :unique=>true, :using=>:btree}) + -> 0.0054s +-- create_table("users", {:force=>:cascade}) + -> 0.0111s +-- add_index("users", ["admin"], {:name=>"index_users_on_admin", :using=>:btree}) + -> 0.0065s +-- add_index("users", ["confirmation_token"], {:name=>"index_users_on_confirmation_token", :unique=>true, :using=>:btree}) + -> 0.0065s +-- add_index("users", ["created_at"], {:name=>"index_users_on_created_at", :using=>:btree}) + -> 0.0068s +-- add_index("users", ["email"], {:name=>"index_users_on_email", :unique=>true, :using=>:btree}) + -> 0.0066s +-- add_index("users", ["email"], {:name=>"index_users_on_email_trigram", :using=>:gin, :opclasses=>{"email"=>"gin_trgm_ops"}}) + -> 0.0011s +-- add_index("users", ["ghost"], {:name=>"index_users_on_ghost", :using=>:btree}) + -> 0.0063s +-- add_index("users", ["incoming_email_token"], {:name=>"index_users_on_incoming_email_token", :using=>:btree}) + -> 0.0057s +-- add_index("users", ["name"], {:name=>"index_users_on_name", :using=>:btree}) + -> 0.0056s +-- add_index("users", ["name"], {:name=>"index_users_on_name_trigram", :using=>:gin, :opclasses=>{"name"=>"gin_trgm_ops"}}) + -> 0.0011s +-- add_index("users", ["reset_password_token"], {:name=>"index_users_on_reset_password_token", :unique=>true, :using=>:btree}) + -> 0.0055s +-- add_index("users", ["rss_token"], {:name=>"index_users_on_rss_token", :using=>:btree}) + -> 0.0068s +-- add_index("users", ["state"], {:name=>"index_users_on_state", :using=>:btree}) + -> 0.0067s +-- add_index("users", ["username"], {:name=>"index_users_on_username", :using=>:btree}) + -> 0.0072s +-- add_index("users", ["username"], {:name=>"index_users_on_username_trigram", :using=>:gin, :opclasses=>{"username"=>"gin_trgm_ops"}}) + -> 0.0012s +-- create_table("users_star_projects", {:force=>:cascade}) + -> 0.0100s +-- add_index("users_star_projects", ["project_id"], {:name=>"index_users_star_projects_on_project_id", :using=>:btree}) + -> 0.0061s +-- add_index("users_star_projects", ["user_id", "project_id"], {:name=>"index_users_star_projects_on_user_id_and_project_id", :unique=>true, :using=>:btree}) + -> 0.0068s +-- create_table("web_hook_logs", {:force=>:cascade}) + -> 0.0097s +-- add_index("web_hook_logs", ["web_hook_id"], {:name=>"index_web_hook_logs_on_web_hook_id", :using=>:btree}) + -> 0.0057s +-- create_table("web_hooks", {:force=>:cascade}) + -> 0.0080s +-- add_index("web_hooks", ["project_id"], {:name=>"index_web_hooks_on_project_id", :using=>:btree}) + -> 0.0062s +-- add_index("web_hooks", ["type"], {:name=>"index_web_hooks_on_type", :using=>:btree}) + -> 0.0065s +-- add_foreign_key("badges", "namespaces", {:column=>"group_id", :on_delete=>:cascade}) + -> 0.0158s +-- add_foreign_key("badges", "projects", {:on_delete=>:cascade}) + -> 0.0140s +-- add_foreign_key("boards", "namespaces", {:column=>"group_id", :on_delete=>:cascade}) + -> 0.0138s +-- add_foreign_key("boards", "projects", {:name=>"fk_f15266b5f9", :on_delete=>:cascade}) + -> 0.0118s +-- add_foreign_key("chat_teams", "namespaces", {:on_delete=>:cascade}) + -> 0.0130s +-- add_foreign_key("ci_build_trace_section_names", "projects", {:on_delete=>:cascade}) + -> 0.0131s 
+-- add_foreign_key("ci_build_trace_sections", "ci_build_trace_section_names", {:column=>"section_name_id", :name=>"fk_264e112c66", :on_delete=>:cascade}) + -> 0.0210s +-- add_foreign_key("ci_build_trace_sections", "ci_builds", {:column=>"build_id", :name=>"fk_4ebe41f502", :on_delete=>:cascade}) + -> 0.0823s +-- add_foreign_key("ci_build_trace_sections", "projects", {:on_delete=>:cascade}) + -> 0.0942s +-- add_foreign_key("ci_builds", "ci_pipelines", {:column=>"auto_canceled_by_id", :name=>"fk_a2141b1522", :on_delete=>:nullify}) + -> 0.1346s +-- add_foreign_key("ci_builds", "ci_stages", {:column=>"stage_id", :name=>"fk_3a9eaa254d", :on_delete=>:cascade}) + -> 0.0506s +-- add_foreign_key("ci_builds", "projects", {:name=>"fk_befce0568a", :on_delete=>:cascade}) + -> 0.0403s +-- add_foreign_key("ci_builds_metadata", "ci_builds", {:column=>"build_id", :on_delete=>:cascade}) + -> 0.0160s +-- add_foreign_key("ci_builds_metadata", "projects", {:on_delete=>:cascade}) + -> 0.0165s +-- add_foreign_key("ci_group_variables", "namespaces", {:column=>"group_id", :name=>"fk_33ae4d58d8", :on_delete=>:cascade}) + -> 0.0153s +-- add_foreign_key("ci_job_artifacts", "ci_builds", {:column=>"job_id", :on_delete=>:cascade}) + -> 0.0160s +-- add_foreign_key("ci_job_artifacts", "projects", {:on_delete=>:cascade}) + -> 0.0278s +-- add_foreign_key("ci_pipeline_schedule_variables", "ci_pipeline_schedules", {:column=>"pipeline_schedule_id", :name=>"fk_41c35fda51", :on_delete=>:cascade}) + -> 0.0193s +-- add_foreign_key("ci_pipeline_schedules", "projects", {:name=>"fk_8ead60fcc4", :on_delete=>:cascade}) + -> 0.0184s +-- add_foreign_key("ci_pipeline_schedules", "users", {:column=>"owner_id", :name=>"fk_9ea99f58d2", :on_delete=>:nullify}) + -> 0.0158s +-- add_foreign_key("ci_pipeline_variables", "ci_pipelines", {:column=>"pipeline_id", :name=>"fk_f29c5f4380", :on_delete=>:cascade}) + -> 0.0097s +-- add_foreign_key("ci_pipelines", "ci_pipeline_schedules", {:column=>"pipeline_schedule_id", :name=>"fk_3d34ab2e06", :on_delete=>:nullify}) + -> 0.0693s +-- add_foreign_key("ci_pipelines", "ci_pipelines", {:column=>"auto_canceled_by_id", :name=>"fk_262d4c2d19", :on_delete=>:nullify}) + -> 0.1599s +-- add_foreign_key("ci_pipelines", "projects", {:name=>"fk_86635dbd80", :on_delete=>:cascade}) + -> 0.1505s +-- add_foreign_key("ci_runner_projects", "projects", {:name=>"fk_4478a6f1e4", :on_delete=>:cascade}) + -> 0.0984s +-- add_foreign_key("ci_stages", "ci_pipelines", {:column=>"pipeline_id", :name=>"fk_fb57e6cc56", :on_delete=>:cascade}) + -> 0.1152s +-- add_foreign_key("ci_stages", "projects", {:name=>"fk_2360681d1d", :on_delete=>:cascade}) + -> 0.1062s +-- add_foreign_key("ci_trigger_requests", "ci_triggers", {:column=>"trigger_id", :name=>"fk_b8ec8b7245", :on_delete=>:cascade}) + -> 0.0455s +-- add_foreign_key("ci_triggers", "projects", {:name=>"fk_e3e63f966e", :on_delete=>:cascade}) + -> 0.0725s +-- add_foreign_key("ci_triggers", "users", {:column=>"owner_id", :name=>"fk_e8e10d1964", :on_delete=>:cascade}) + -> 0.0774s +-- add_foreign_key("ci_variables", "projects", {:name=>"fk_ada5eb64b3", :on_delete=>:cascade}) + -> 0.0626s +-- add_foreign_key("cluster_platforms_kubernetes", "clusters", {:on_delete=>:cascade}) + -> 0.0529s +-- add_foreign_key("cluster_projects", "clusters", {:on_delete=>:cascade}) + -> 0.0678s +-- add_foreign_key("cluster_projects", "projects", {:on_delete=>:cascade}) + -> 0.0391s +-- add_foreign_key("cluster_providers_gcp", "clusters", {:on_delete=>:cascade}) + -> 0.0328s +-- add_foreign_key("clusters", 
"users", {:on_delete=>:nullify}) + -> 0.1266s +-- add_foreign_key("clusters_applications_helm", "clusters", {:on_delete=>:cascade}) + -> 0.0489s +-- add_foreign_key("clusters_applications_ingress", "clusters", {:name=>"fk_753a7b41c1", :on_delete=>:cascade}) + -> 0.0565s +-- add_foreign_key("clusters_applications_prometheus", "clusters", {:name=>"fk_557e773639", :on_delete=>:cascade}) + -> 0.0174s +-- add_foreign_key("clusters_applications_runners", "ci_runners", {:column=>"runner_id", :name=>"fk_02de2ded36", :on_delete=>:nullify}) + -> 0.0182s +-- add_foreign_key("clusters_applications_runners", "clusters", {:on_delete=>:cascade}) + -> 0.0208s +-- add_foreign_key("container_repositories", "projects") + -> 0.0186s +-- add_foreign_key("deploy_keys_projects", "projects", {:name=>"fk_58a901ca7e", :on_delete=>:cascade}) + -> 0.0140s +-- add_foreign_key("deployments", "projects", {:name=>"fk_b9a3851b82", :on_delete=>:cascade}) + -> 0.0328s +-- add_foreign_key("environments", "projects", {:name=>"fk_d1c8c1da6a", :on_delete=>:cascade}) + -> 0.0221s +-- add_foreign_key("events", "projects", {:on_delete=>:cascade}) + -> 0.0212s +-- add_foreign_key("events", "users", {:column=>"author_id", :name=>"fk_edfd187b6f", :on_delete=>:cascade}) + -> 0.0150s +-- add_foreign_key("fork_network_members", "fork_networks", {:on_delete=>:cascade}) + -> 0.0134s +-- add_foreign_key("fork_network_members", "projects", {:column=>"forked_from_project_id", :name=>"fk_b01280dae4", :on_delete=>:nullify}) + -> 0.0200s +-- add_foreign_key("fork_network_members", "projects", {:on_delete=>:cascade}) + -> 0.0162s +-- add_foreign_key("fork_networks", "projects", {:column=>"root_project_id", :name=>"fk_e7b436b2b5", :on_delete=>:nullify}) + -> 0.0138s +-- add_foreign_key("forked_project_links", "projects", {:column=>"forked_to_project_id", :name=>"fk_434510edb0", :on_delete=>:cascade}) + -> 0.0137s +-- add_foreign_key("gcp_clusters", "projects", {:on_delete=>:cascade}) + -> 0.0148s +-- add_foreign_key("gcp_clusters", "services", {:on_delete=>:nullify}) + -> 0.0216s +-- add_foreign_key("gcp_clusters", "users", {:on_delete=>:nullify}) + -> 0.0156s +-- add_foreign_key("gpg_key_subkeys", "gpg_keys", {:on_delete=>:cascade}) + -> 0.0139s +-- add_foreign_key("gpg_keys", "users", {:on_delete=>:cascade}) + -> 0.0142s +-- add_foreign_key("gpg_signatures", "gpg_key_subkeys", {:on_delete=>:nullify}) + -> 0.0216s +-- add_foreign_key("gpg_signatures", "gpg_keys", {:on_delete=>:nullify}) + -> 0.0211s +-- add_foreign_key("gpg_signatures", "projects", {:on_delete=>:cascade}) + -> 0.0215s +-- add_foreign_key("group_custom_attributes", "namespaces", {:column=>"group_id", :on_delete=>:cascade}) + -> 0.0174s +-- add_foreign_key("internal_ids", "projects", {:on_delete=>:cascade}) + -> 0.0143s +-- add_foreign_key("issue_assignees", "issues", {:name=>"fk_b7d881734a", :on_delete=>:cascade}) + -> 0.0139s +-- add_foreign_key("issue_assignees", "users", {:name=>"fk_5e0c8d9154", :on_delete=>:cascade}) + -> 0.0138s +-- add_foreign_key("issue_metrics", "issues", {:on_delete=>:cascade}) + -> 0.0106s +-- add_foreign_key("issues", "issues", {:column=>"moved_to_id", :name=>"fk_a194299be1", :on_delete=>:nullify}) + -> 0.0366s +-- add_foreign_key("issues", "milestones", {:name=>"fk_96b1dd429c", :on_delete=>:nullify}) + -> 0.0309s +-- add_foreign_key("issues", "projects", {:name=>"fk_899c8f3231", :on_delete=>:cascade}) + -> 0.0314s +-- add_foreign_key("issues", "users", {:column=>"author_id", :name=>"fk_05f1e72feb", :on_delete=>:nullify}) + -> 0.0504s +-- 
add_foreign_key("issues", "users", {:column=>"closed_by_id", :name=>"fk_c63cbf6c25", :on_delete=>:nullify}) + -> 0.0428s +-- add_foreign_key("issues", "users", {:column=>"updated_by_id", :name=>"fk_ffed080f01", :on_delete=>:nullify}) + -> 0.0333s +-- add_foreign_key("label_priorities", "labels", {:on_delete=>:cascade}) + -> 0.0143s +-- add_foreign_key("label_priorities", "projects", {:on_delete=>:cascade}) + -> 0.0160s +-- add_foreign_key("labels", "namespaces", {:column=>"group_id", :on_delete=>:cascade}) + -> 0.0176s +-- add_foreign_key("labels", "projects", {:name=>"fk_7de4989a69", :on_delete=>:cascade}) + -> 0.0216s +-- add_foreign_key("lfs_file_locks", "projects", {:on_delete=>:cascade}) + -> 0.0144s +-- add_foreign_key("lfs_file_locks", "users", {:on_delete=>:cascade}) + -> 0.0178s +-- add_foreign_key("lists", "boards", {:name=>"fk_0d3f677137", :on_delete=>:cascade}) + -> 0.0161s +-- add_foreign_key("lists", "labels", {:name=>"fk_7a5553d60f", :on_delete=>:cascade}) + -> 0.0137s +-- add_foreign_key("members", "users", {:name=>"fk_2e88fb7ce9", :on_delete=>:cascade}) + -> 0.0171s +-- add_foreign_key("merge_request_diff_commits", "merge_request_diffs", {:on_delete=>:cascade}) + -> 0.0143s +-- add_foreign_key("merge_request_diff_files", "merge_request_diffs", {:on_delete=>:cascade}) + -> 0.0106s +-- add_foreign_key("merge_request_diffs", "merge_requests", {:name=>"fk_8483f3258f", :on_delete=>:cascade}) + -> 0.0119s +-- add_foreign_key("merge_request_metrics", "ci_pipelines", {:column=>"pipeline_id", :on_delete=>:cascade}) + -> 0.0163s +-- add_foreign_key("merge_request_metrics", "merge_requests", {:on_delete=>:cascade}) + -> 0.0204s +-- add_foreign_key("merge_request_metrics", "users", {:column=>"latest_closed_by_id", :name=>"fk_ae440388cc", :on_delete=>:nullify}) + -> 0.0196s +-- add_foreign_key("merge_request_metrics", "users", {:column=>"merged_by_id", :name=>"fk_7f28d925f3", :on_delete=>:nullify}) + -> 0.0202s +-- add_foreign_key("merge_requests", "ci_pipelines", {:column=>"head_pipeline_id", :name=>"fk_fd82eae0b9", :on_delete=>:nullify}) + -> 0.0394s +-- add_foreign_key("merge_requests", "merge_request_diffs", {:column=>"latest_merge_request_diff_id", :name=>"fk_06067f5644", :on_delete=>:nullify}) + -> 0.0532s +-- add_foreign_key("merge_requests", "milestones", {:name=>"fk_6a5165a692", :on_delete=>:nullify}) + -> 0.0291s +-- add_foreign_key("merge_requests", "projects", {:column=>"source_project_id", :name=>"fk_3308fe130c", :on_delete=>:nullify}) + -> 0.0278s +-- add_foreign_key("merge_requests", "projects", {:column=>"target_project_id", :name=>"fk_a6963e8447", :on_delete=>:cascade}) + -> 0.0367s +-- add_foreign_key("merge_requests", "users", {:column=>"assignee_id", :name=>"fk_6149611a04", :on_delete=>:nullify}) + -> 0.0327s +-- add_foreign_key("merge_requests", "users", {:column=>"author_id", :name=>"fk_e719a85f8a", :on_delete=>:nullify}) + -> 0.0337s +-- add_foreign_key("merge_requests", "users", {:column=>"merge_user_id", :name=>"fk_ad525e1f87", :on_delete=>:nullify}) + -> 0.0517s +-- add_foreign_key("merge_requests", "users", {:column=>"updated_by_id", :name=>"fk_641731faff", :on_delete=>:nullify}) + -> 0.0335s +-- add_foreign_key("merge_requests_closing_issues", "issues", {:on_delete=>:cascade}) + -> 0.0167s +-- add_foreign_key("merge_requests_closing_issues", "merge_requests", {:on_delete=>:cascade}) + -> 0.0191s +-- add_foreign_key("milestones", "namespaces", {:column=>"group_id", :name=>"fk_95650a40d4", :on_delete=>:cascade}) + -> 0.0206s +-- add_foreign_key("milestones", 
"projects", {:name=>"fk_9bd0a0c791", :on_delete=>:cascade}) + -> 0.0221s +-- add_foreign_key("notes", "projects", {:name=>"fk_99e097b079", :on_delete=>:cascade}) + -> 0.0332s +-- add_foreign_key("oauth_openid_requests", "oauth_access_grants", {:column=>"access_grant_id", :name=>"fk_oauth_openid_requests_oauth_access_grants_access_grant_id"}) + -> 0.0128s +-- add_foreign_key("pages_domains", "projects", {:name=>"fk_ea2f6dfc6f", :on_delete=>:cascade}) + -> 0.0220s +-- add_foreign_key("personal_access_tokens", "users") + -> 0.0187s +-- add_foreign_key("project_authorizations", "projects", {:on_delete=>:cascade}) + -> 0.0149s +-- add_foreign_key("project_authorizations", "users", {:on_delete=>:cascade}) + -> 0.0167s +-- add_foreign_key("project_auto_devops", "projects", {:on_delete=>:cascade}) + -> 0.0142s +-- add_foreign_key("project_custom_attributes", "projects", {:on_delete=>:cascade}) + -> 0.0218s +-- add_foreign_key("project_features", "projects", {:name=>"fk_18513d9b92", :on_delete=>:cascade}) + -> 0.0204s +-- add_foreign_key("project_group_links", "projects", {:name=>"fk_daa8cee94c", :on_delete=>:cascade}) + -> 0.0174s +-- add_foreign_key("project_import_data", "projects", {:name=>"fk_ffb9ee3a10", :on_delete=>:cascade}) + -> 0.0138s +-- add_foreign_key("project_statistics", "projects", {:on_delete=>:cascade}) + -> 0.0125s +-- add_foreign_key("protected_branch_merge_access_levels", "protected_branches", {:name=>"fk_8a3072ccb3", :on_delete=>:cascade}) + -> 0.0157s +-- add_foreign_key("protected_branch_push_access_levels", "protected_branches", {:name=>"fk_9ffc86a3d9", :on_delete=>:cascade}) + -> 0.0112s +-- add_foreign_key("protected_branches", "projects", {:name=>"fk_7a9c6d93e7", :on_delete=>:cascade}) + -> 0.0122s +-- add_foreign_key("protected_tag_create_access_levels", "namespaces", {:column=>"group_id"}) + -> 0.0131s +-- add_foreign_key("protected_tag_create_access_levels", "protected_tags", {:name=>"fk_f7dfda8c51", :on_delete=>:cascade}) + -> 0.0168s +-- add_foreign_key("protected_tag_create_access_levels", "users") + -> 0.0221s +-- add_foreign_key("protected_tags", "projects", {:name=>"fk_8e4af87648", :on_delete=>:cascade}) + -> 0.0135s +-- add_foreign_key("push_event_payloads", "events", {:name=>"fk_36c74129da", :on_delete=>:cascade}) + -> 0.0107s +-- add_foreign_key("releases", "projects", {:name=>"fk_47fe2a0596", :on_delete=>:cascade}) + -> 0.0131s +-- add_foreign_key("services", "projects", {:name=>"fk_71cce407f9", :on_delete=>:cascade}) + -> 0.0142s +-- add_foreign_key("snippets", "projects", {:name=>"fk_be41fd4bb7", :on_delete=>:cascade}) + -> 0.0178s +-- add_foreign_key("subscriptions", "projects", {:on_delete=>:cascade}) + -> 0.0160s +-- add_foreign_key("system_note_metadata", "notes", {:name=>"fk_d83a918cb1", :on_delete=>:cascade}) + -> 0.0156s +-- add_foreign_key("timelogs", "issues", {:name=>"fk_timelogs_issues_issue_id", :on_delete=>:cascade}) + -> 0.0183s +-- add_foreign_key("timelogs", "merge_requests", {:name=>"fk_timelogs_merge_requests_merge_request_id", :on_delete=>:cascade}) + -> 0.0198s +-- add_foreign_key("todos", "notes", {:name=>"fk_91d1f47b13", :on_delete=>:cascade}) + -> 0.0276s +-- add_foreign_key("todos", "projects", {:name=>"fk_45054f9c45", :on_delete=>:cascade}) + -> 0.0175s +-- add_foreign_key("todos", "users", {:column=>"author_id", :name=>"fk_ccf0373936", :on_delete=>:cascade}) + -> 0.0182s +-- add_foreign_key("todos", "users", {:name=>"fk_d94154aa95", :on_delete=>:cascade}) + -> 0.0184s +-- add_foreign_key("trending_projects", "projects", 
{:on_delete=>:cascade}) + -> 0.0338s +-- add_foreign_key("u2f_registrations", "users") + -> 0.0176s +-- add_foreign_key("user_callouts", "users", {:on_delete=>:cascade}) + -> 0.0160s +-- add_foreign_key("user_custom_attributes", "users", {:on_delete=>:cascade}) + -> 0.0191s +-- add_foreign_key("user_interacted_projects", "projects", {:name=>"fk_722ceba4f7", :on_delete=>:cascade}) + -> 0.0171s +-- add_foreign_key("user_interacted_projects", "users", {:name=>"fk_0894651f08", :on_delete=>:cascade}) + -> 0.0155s +-- add_foreign_key("user_synced_attributes_metadata", "users", {:on_delete=>:cascade}) + -> 0.0164s +-- add_foreign_key("users_star_projects", "projects", {:name=>"fk_22cd27ddfc", :on_delete=>:cascade}) + -> 0.0180s +-- add_foreign_key("web_hook_logs", "web_hooks", {:on_delete=>:cascade}) + -> 0.0164s +-- add_foreign_key("web_hooks", "projects", {:name=>"fk_0c8ca6d9d1", :on_delete=>:cascade}) + -> 0.0172s +-- initialize_schema_migrations_table() + -> 0.0212s +Adding limits to schema.rb for mysql +-- column_exists?(:merge_request_diffs, :st_commits) + -> 0.0010s +-- column_exists?(:merge_request_diffs, :st_diffs) + -> 0.0006s +-- change_column(:snippets, :content, :text, {:limit=>2147483647}) + -> 0.0308s +-- change_column(:notes, :st_diff, :text, {:limit=>2147483647}) + -> 0.0366s +-- change_column(:snippets, :content_html, :text, {:limit=>2147483647}) + -> 0.0272s +-- change_column(:merge_request_diff_files, :diff, :text, {:limit=>2147483647}) + -> 0.0170s +$ date +Thu Apr 5 11:19:41 UTC 2018 +$ JOB_NAME=( $CI_JOB_NAME ) +$ export CI_NODE_INDEX=${JOB_NAME[-2]} +$ export CI_NODE_TOTAL=${JOB_NAME[-1]} +$ export KNAPSACK_REPORT_PATH=knapsack/${CI_PROJECT_NAME}/${JOB_NAME[0]}_node_${CI_NODE_INDEX}_${CI_NODE_TOTAL}_report.json +$ export KNAPSACK_GENERATE_REPORT=true +$ export SUITE_FLAKY_RSPEC_REPORT_PATH=${FLAKY_RSPEC_SUITE_REPORT_PATH} +$ export FLAKY_RSPEC_REPORT_PATH=rspec_flaky/all_${JOB_NAME[0]}_${CI_NODE_INDEX}_${CI_NODE_TOTAL}_report.json +$ export NEW_FLAKY_RSPEC_REPORT_PATH=rspec_flaky/new_${JOB_NAME[0]}_${CI_NODE_INDEX}_${CI_NODE_TOTAL}_report.json +$ export FLAKY_RSPEC_GENERATE_REPORT=true +$ export CACHE_CLASSES=true +$ cp ${KNAPSACK_RSPEC_SUITE_REPORT_PATH} ${KNAPSACK_REPORT_PATH} +$ [[ -f $FLAKY_RSPEC_REPORT_PATH ]] || echo "{}" > ${FLAKY_RSPEC_REPORT_PATH} +$ [[ -f $NEW_FLAKY_RSPEC_REPORT_PATH ]] || echo "{}" > ${NEW_FLAKY_RSPEC_REPORT_PATH} +$ scripts/gitaly-test-spawn +59 +$ knapsack rspec "--color --format documentation" + +Report specs: +spec/services/todo_service_spec.rb +spec/lib/gitlab/import_export/project_tree_saver_spec.rb +spec/lib/gitlab/background_migration/deserialize_merge_request_diffs_and_commits_spec.rb +spec/controllers/projects/merge_requests_controller_spec.rb +spec/controllers/groups_controller_spec.rb +spec/features/projects/import_export/import_file_spec.rb +spec/lib/gitlab/middleware/go_spec.rb +spec/services/groups/transfer_service_spec.rb +spec/features/projects/blobs/edit_spec.rb +spec/services/boards/lists/move_service_spec.rb +spec/services/create_deployment_service_spec.rb +spec/controllers/groups/milestones_controller_spec.rb +spec/helpers/groups_helper_spec.rb +spec/requests/api/v3/todos_spec.rb +spec/models/project_services/teamcity_service_spec.rb +spec/lib/gitlab/conflict/file_spec.rb +spec/lib/banzai/filter/snippet_reference_filter_spec.rb +spec/finders/autocomplete_users_finder_spec.rb +spec/models/service_spec.rb +spec/services/test_hooks/project_service_spec.rb 
+
+Report specs:
+spec/services/todo_service_spec.rb
+spec/lib/gitlab/import_export/project_tree_saver_spec.rb
+spec/lib/gitlab/background_migration/deserialize_merge_request_diffs_and_commits_spec.rb
+spec/controllers/projects/merge_requests_controller_spec.rb
+spec/controllers/groups_controller_spec.rb
+spec/features/projects/import_export/import_file_spec.rb
+spec/lib/gitlab/middleware/go_spec.rb
+spec/services/groups/transfer_service_spec.rb
+spec/features/projects/blobs/edit_spec.rb
+spec/services/boards/lists/move_service_spec.rb
+spec/services/create_deployment_service_spec.rb
+spec/controllers/groups/milestones_controller_spec.rb
+spec/helpers/groups_helper_spec.rb
+spec/requests/api/v3/todos_spec.rb
+spec/models/project_services/teamcity_service_spec.rb
+spec/lib/gitlab/conflict/file_spec.rb
+spec/lib/banzai/filter/snippet_reference_filter_spec.rb
+spec/finders/autocomplete_users_finder_spec.rb
+spec/models/service_spec.rb
+spec/services/test_hooks/project_service_spec.rb
+spec/features/projects/merge_requests/user_views_open_merge_request_spec.rb
+spec/finders/runner_jobs_finder_spec.rb
+spec/features/projects/snippets_spec.rb
+spec/requests/api/v3/environments_spec.rb
+spec/requests/api/namespaces_spec.rb
+spec/services/merge_requests/get_urls_service_spec.rb
+spec/models/lfs_file_lock_spec.rb
+spec/lib/gitlab/ci/config/entry/boolean_spec.rb
+
+Leftover specs:
+
+Knapsack report generator started!
+
+==> Setting up GitLab Shell...
+    GitLab Shell setup in 0.307428917 seconds...
+
+==> Setting up Gitaly...
+    Gitaly setup in 0.000135767 seconds...
+
+TodoService
+  updates cached counts when a todo is created
+  Issues: #new_issue, #update_issue, #close_issue, #destroy_target, #reassigned_issue, #mark_pending_todos_as_done, #mark_todo and #todo_exists? create, reassign and complete todos for assignees and for mentioned or directly addressed users, skipping unassigned users, users who cannot see confidential issues, and tasks marked as completed
+  #mark_todos_as_done, #mark_todos_as_done_by_ids, #mark_todos_as_pending and #mark_todos_as_pending_by_ids each behave like updating todos state: updating related todos for the user with the new_state, returning the updated ids and updating cached counts when todos change
+  #new_note marks pending todos done for the note author and creates todos for mentioned or addressed users, except on system notes, confidential issues and snippets; on commit it creates todos for users mentioned when leaving a note on the commit
+  Merge Requests: #new_merge_request, #update_merge_request, #close_merge_request, #reassigned_merge_request, #merge_merge_request, #new_award_emoji, #merge_request_build_failed, #merge_request_push, #merge_request_became_unmergeable, #mark_todo, #new_note and #update_note mirror the issue behaviour and never create todos for guests
+  #mark_todos_as_done marks relations and arrays of todos as done, returns the ids of todos still pending, and caches the number of todos of a user
+
+Gitlab::ImportExport::ProjectTreeSaver
+  saves the project tree into a json object
+  saves project successfully
+  JSON: the saved json contains milestones, merge requests (with milestones, source/target branch SHAs, diffs, diff files and diff commits), events, snippets and snippet notes, releases, issues (with comments, assignees and authors), project members, pipeline stages/statuses/builds/commits, ci pipeline notes, labels (project and group, with and without associations, and priorities), service type and properties, project feature, custom attributes and badges, and does not complain about non UTF-8 characters in MR diff files
+    with description override: overrides the project description
+    group members: not exported without permission or as master; exported as group owner and as admin (including project members)
+    project attributes: contains the html description and does not contain the runners token
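The repeated "behaves like updating todos state" entries above come from RSpec shared examples: one example group reused by several entry points. A self-contained sketch of the pattern, with toy stand-ins rather than GitLab's real models:

require 'rspec/autorun'

# Toy stand-ins so the sketch runs on its own; GitLab's real code differs.
Todo = Struct.new(:id, :state)

class TodoService
  # Marks every todo done and returns the updated ids.
  def mark_todos_as_done(todos)
    todos.each { |todo| todo.state = :done }.map(&:id)
  end
end

RSpec.shared_examples 'updating todos state' do |new_state|
  it 'updates related todos for the user with the new_state' do
    subject
    expect(todos.map(&:state)).to all(eq(new_state))
  end

  it 'returns the updated ids' do
    expect(subject).to match_array(todos.map(&:id))
  end
end

RSpec.describe TodoService do
  let(:todos) { [Todo.new(1, :pending), Todo.new(2, :pending)] }
  subject     { described_class.new.mark_todos_as_done(todos) }

  it_behaves_like 'updating todos state', :done
end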
+Gitlab::BackgroundMigration::DeserializeMergeRequestDiffsAndCommits
+  #perform
+    does not raise when the diff IDs passed do not exist or when the merge request diff has no serialised commits or diffs
+    processing multiple merge request diffs: inserts commit rows in chunks of BUFFER_ROWS and diff rows in chunks of DIFF_FILE_BUFFER_ROWS when BUFFER_ROWS is exceeded, and only updates once when it is not
+    when some rows were already inserted due to a previous failure: does not raise, logs a message and ends up with the correct rows
+    when the merge request diff update fails: raises an error, logs the error and still adds diff commits and diff files
+    for every diff shape (valid commits and diffs; diffs but no commits; commits but no diffs; missing too_large, a_mode or b_mode; binary or invalid content; Rugged::Patch or Rugged::Diff::Delta instances): creates correct entries in the merge_request_diff_commits and merge_request_diff_files tables and sets the st_commits and st_diffs columns to nil
+
+Projects::MergeRequestsController
+  GET commit_change_content renders the commit_change_content template
+  GET show behaves like loads labels; as html it renders the merge request page and loads notes (including with special_role FIRST_TIME_CONTRIBUTOR); as json it renders the basic or widget MR entity depending on the serializer param; as diff and as patch it triggers workhorse to serve the request
+  GET index behaves like issuables list meta-data, redirects out-of-range or explicit page params, does not redirect to external sites when provided a host field, and lists opened and reopened merge requests when filtering by state
+  PUT update limits the attributes exposed on the assignee, responds with 404 when the user cannot update, closes and edits MRs without a source project, does not allow target branch updates on closed MRs, and behaves like update invalid issuable
+  POST merge returns 404 when the user cannot access, :failed when the MR is not mergeable, :sha_mismatch when the sha parameter does not match the source SHA, and :success (starting the merge immediately) when it matches; it returns :merge_when_pipeline_succeeds when a pipeline is running or project.only_allow_merge_if_pipeline_succeeds? is true, :failed when the head pipeline is not the current one, and honours only_allow_merge_if_all_discussions_are_resolved
+  DELETE destroy denies access to users unless they're admin or project owner; the owner deletes the merge request and delegates the todos count cache update to TodoService
+  GET commits and GET pipelines render their serialized payloads; POST remove_wip, POST cancel_merge_when_pipeline_succeeds and POST assign_related_issues call their services and render MergeRequest as JSON or flash messages (skipped when not signed in)
+  GET ci_environments_status links to environments of forked projects; GET pipeline_status.json returns a detailed head_pipeline status or empty when none exists
+  POST #rebase enqueues a RebaseWorker; with a forked project it returns 404 when the user cannot push to the source branch and 200 when they can
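The POST merge statuses above (:sha_mismatch, :failed, :success) suggest a guard that compares the SHA the reviewer last saw against the branch's current diff head. A hedged sketch of that check; the class and method names are illustrative, not GitLab's actual service:

class MergeService
  def initialize(merge_request)
    @merge_request = merge_request
  end

  # Returns a status symbol mirroring the spec names: :sha_mismatch when
  # the reviewed SHA is stale, :failed when not mergeable, :success otherwise.
  def merge(params)
    return :sha_mismatch unless params[:sha] == @merge_request.diff_head_sha
    return :failed unless @merge_request.mergeable?

    @merge_request.merge!
    :success
  end
end

# Tiny usage demo with a stand-in merge request:
MergeRequest = Struct.new(:diff_head_sha) do
  def mergeable? = true
  def merge! = nil
end

MergeService.new(MergeRequest.new('abc123')).merge(sha: 'abc123') # => :success
MergeService.new(MergeRequest.new('abc123')).merge(sha: 'stale0') # => :sha_mismatch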
+GroupsController
+  GET #show assigns whether or not a group has children (as html) and assigns events for all the projects in the group (as atom)
+  GET #new and POST #create when creating subgroups: for every combination of can_create_group and role (Admin, Owner, Guest, Developer, Master), behaves like member with/without ability to create subgroups, rendering the new page or the 404 page; every example is PENDING: around hook at ./spec/spec_helper.rb:186 did not execute the example
+  POST #create when creating a top level group: creates the Group when can_create_group is enabled and does not when it is disabled
+  GET #activity as json includes all projects in the event feed; GET #index redirects users to the Groups Dashboard and guests to Explore Groups
+  GET #issues and GET #merge_requests sort most and least popular items by votes
+  DELETE #destroy returns 404 for other users; the group owner schedules a group destroy and is redirected to the root path
+  PUT update updates the path successfully and does not update the path on error
+  #ensure_canonical_path redirects GET requests to the canonical casing (at the root path and under /groups), returns not found for redirected POST and DELETE paths, and never modifies the requested host or the /groups part of the path
+  PUT transfer: transferring to a subgroup, converting to a root group, failed transfers and disallowed users are all PENDING: around hook at ./spec/spec_helper.rb:190 did not execute the example
+
+Import/Export - project import integration test
+Starting the Capybara driver server...
+  invalid project
+    when selecting the namespace
+      prefilled the path
+        user imports an exported project successfully
+      path is not prefilled
+        user imports an exported project successfully
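The PENDING entries quoting "around hook ... did not execute the example" are RSpec's standard message when an around hook returns without calling example.run; GitLab appears to use this to skip examples on unsupported configurations. A small sketch of the pattern; the :postgresql tag and DB environment variable are assumptions, and the real hook lives in spec/spec_helper.rb:

require 'rspec/autorun'

RSpec.configure do |config|
  config.around(:each, :postgresql) do |example|
    # When example.run is never called, RSpec reports the example as
    # "PENDING: around hook ... did not execute the example".
    example.run if ENV['DB'] == 'postgresql'
  end
end

RSpec.describe 'nested group transfer', :postgresql do
  it 'creates the subgroup' do
    expect(1 + 1).to eq(2)
  end
end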
+Gitlab::Middleware::Go
+  #call
+    when go-get=0: skips go-import generation
+    when go-get=1, the same example tree runs four times (with SSH disabled, with HTTP disabled, with nothing disabled, and with nothing disabled (blank string)):
+      with simple 2-segment project path: returns the full project path with and without subpackages
+      with a nested project path (with subpackages, with a subpackage that is not a valid project path, and without subpackages), each behaves like a nested project:
+        when the project is public: returns the full project path
+        when the project is private:
+          when not authenticated: behaves like unauthorized and returns the 2-segment group path
+          when authenticated via warden (active) or via a personal access token with api scope: behaves like authenticated, returning the full project path with access to the project and the 2-segment group path without
+          when blocked, or using a personal access token with only read_user scope: behaves like unauthorized and returns the 2-segment group path
+      with a bogus path: skips go-import generation
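The go-get=1 examples describe answering `go get` requests with a go-import meta tag that maps a nested import path back to its 2-segment project. A simplified Rack sketch of the idea; GitLab's real middleware also handles authentication, private projects and the SSH/HTTP clone protocol settings tested above, and the host name here is a placeholder:

require 'rack'

class GoImportMiddleware
  def initialize(app, host: 'gitlab.example.com')
    @app  = app
    @host = host
  end

  def call(env)
    request = Rack::Request.new(env)
    return @app.call(env) unless request.params['go-get'] == '1'

    # "group/project/subpackage" resolves to the "group/project" root.
    segments = request.path.sub(%r{\A/}, '').split('/')
    return @app.call(env) if segments.size < 2

    project_path = segments.first(2).join('/')
    meta = %(<meta name="go-import" content="#{@host}/#{project_path} git https://#{@host}/#{project_path}.git">)

    [200, { 'Content-Type' => 'text/html' }, ["<html><head>#{meta}</head></html>"]]
  end
end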
+Groups::TransferService
+  #execute
+    when transforming a group into a root group, and when transferring a subgroup into another group, both behave like ensuring allowed transfer for a group: every example (databases other than PostgreSQL, Gitlab shell directory exceptions, already-root groups, unchanged parent groups, missing policies, path collisions with sibling groups or projects, group/children/project path updates, visibility propagation in both directions, redirects for subgroups and projects, nested descendants, and restoring group and project visibility when the update goes wrong) is PENDING: around hook at ./spec/spec_helper.rb:190 did not execute the example
+
+Editing file blob
+  as a developer
+    from MR diff: returns me to the mr
+    from blob file path: updates content, previews content
+  visit blob edit
+    redirects to sign in and returns (as developer and as guest)
+    as developer: on some branch shows the blob editor with the same branch, and with a protected branch shows it with a patch branch
+    as master: shows blob editor with same branch
+Boards::Lists::MoveService
+  #execute: when the board parent is a project or a group, behaves like lists move service: keeps the position of closed lists; for label lists, keeps positions when the new position is nil, negative, out of range or equal to the old position, and increments or decrements the positions of intermediate lists when moving a list to the first or last position or past other lists
+
+CreateDeploymentService
+  #execute creates a deployment when the environment exists and not when it does not; start actions make a stopped environment available (creating a deployment), stop actions make an available environment stopped (without one); variables create a new deployment and update the external url without creating a new environment; nothing is created when the project was removed
+  #expanded_environment_url expands $CI_COMMIT_REF_NAME to "http://review/master" and $CI_ENVIRONMENT_SLUG to "http://review/prod-slug", resolves yaml_variables with symbol keys to "http://review/host", and falls back to the persisted environment's external_url when the yaml has no url
+  processing of builds: without an environment, behaves like does not create deployment; with one, succeeding and retried jobs behave like creates deployment (creating a new deployment, calling a service, set as deployable, updating the environment URL) and failing jobs do not
+  merge request metrics: sets first_deployed_to_production_at only for production deploys of merge requests merged before the current deploy, never overwriting an already-set time or raising when the MR has no metrics record
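The #expanded_environment_url examples show environment URLs being expanded against job variables, so "http://review/$CI_COMMIT_REF_NAME" becomes "http://review/master". A sketch of such an expansion; the helper and variable list below are illustrative, not GitLab's implementation:

# Expand $VAR and ${VAR} references in an environment URL.
def expand_environment_url(url, variables)
  return if url.nil?

  url.gsub(/\$([A-Za-z_][A-Za-z0-9_]*)|\$\{(\w+)\}/) do
    variables[Regexp.last_match(1) || Regexp.last_match(2)].to_s
  end
end

variables = {
  'CI_COMMIT_REF_NAME'  => 'master',
  'CI_ENVIRONMENT_SLUG' => 'prod-slug'
}

expand_environment_url('http://review/$CI_COMMIT_REF_NAME', variables)
# => "http://review/master"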
+Groups::MilestonesController
+  #index shows the group milestones page and lists legacy group milestones and group milestones as JSON
+  #show searches for a legacy group milestone when there is a title parameter, otherwise for a group milestone
+  behaves like milestone tabs: #merge_requests, #participants and #labels redirect to milestone#show as html and render their tab templates to a string as json
+  #create creates a group milestone with a Chinese title; #update updates group milestones, and for legacy group milestones updates only their state
+  #ensure_canonical_path redirects GET requests to the canonical casing, returns not found for redirected non-GET paths, and never modifies the requested host or the /groups part of the path
+
+GroupsHelper
+  group_icon and group_icon_url return an url for the avatar, falling back to the default avatar_icon when no avatar is present
+  group_lfs_status returns each project as enabled or disabled, following the group-level LFS setting for one or many projects
+  group_title outputs the groups in the correct order (PENDING: around hook at ./spec/spec_helper.rb:186 did not execute the example)
+  #share_with_group_lock_help_text: all 24 combinations of root/subgroup/sub-subgroup share_with_group_lock and current_user (:root_owner, :sub_owner, :sub_sub_owner) map to :default_help, :ancestor_locked_and_has_been_overridden, :ancestor_locked_but_you_can_override or :ancestor_locked_so_ask_the_owner with the expected linked ancestor (:root_group, :subgroup or nil); every row is PENDING: around hook at ./spec/spec_helper.rb:186 did not execute the example
+  #group_sidebar_links returns all the expected links, includes settings when the user can admin the group, and excludes cross project features when the user cannot read cross project
+
+API::V3::Todos
+  DELETE /todos/:id returns an authentication error when unauthenticated; when authenticated it marks a todo as done, updates the todos cache, and returns 404 if the todo does not belong to the current user
+  DELETE /todos returns an authentication error when unauthenticated; when authenticated it marks all todos as done and updates the todos cache
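The #share_with_group_lock_help_text rows read like rspec-parameterized output: one generated example per row of a `where` table. A compact sketch of the pattern, with a stubbed lookup standing in for the real helper and a deliberately reduced table:

require 'rspec/autorun'
require 'rspec-parameterized'

RSpec.describe 'share_with_group_lock help text' do
  using RSpec::Parameterized::TableSyntax

  # Each row becomes its own example; columns are exposed as methods.
  where(:root_locked, :subgroup_locked, :help_text) do
    false | false | :default_help
    false | true  | :ancestor_locked_and_has_been_overridden
    true  | true  | :ancestor_locked_but_you_can_override
  end

  with_them do
    it 'has the correct help text' do
      # Stand-in for the real helper's lookup logic.
      resolved =
        if subgroup_locked
          root_locked ? :ancestor_locked_but_you_can_override : :ancestor_locked_and_has_been_overridden
        else
          :default_help
        end

      expect(resolved).to eq(help_text)
    end
  end
end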
+TeamcityService
+  Associations: belongs to project, has one service_hook
+  Validations when the service is active: validates presence of :build_type and :teamcity_url, behaves like issue tracker service URL attribute (allows "https://example.com"; rejects "example.com", "ftp://example.com" and "herp-and-derp"), and validates presence of username and password only when the other is present; when inactive, none of these validate
+  Callbacks before_update :reset_password: saves a password set together with a new url when none was set, resets the password when the url changes, and keeps it when only the username changes or the same password is re-sent with a new url
+  #build_page and #commit_status return the contents of the reactive cache
+  #calculate_reactive_cache: build_page returns a specific URL when the status is 500 and a build URL whether or not teamcity_url has a trailing slash; commit_status maps 500 to :error, 404 to "pending", SUCCESS to "success", FAILURE to "failed", Pending to "pending" and unknown build statuses to :error
+
+Gitlab::Conflict::File
+  #resolve_lines raises ResolutionError when passed a hash without resolutions for all sections; resolving everything to the same side, or with mixed resolutions, yields the correct number of lines and content matching the chosen parts of the resolved sections
+  #highlight_lines! modifies the existing lines, is called implicitly when rich_text is accessed on a line, sets the rich_text of the matching lines and highlights them correctly
+  #sections only inserts match lines between gapped sections, limits context sections to CONTEXT_LINES (plus an optional match line), flags sections with changed lines as conflicts (adding unique IDs to those only), and with an example file sets the correct match line headers and context section lengths
+  #as_json includes the blob path and icon for the file, and with the full_content option includes the full content and detected language of the conflict file
+
+Banzai::Filter::SnippetReferenceFilter
+  requires project context; ignores valid references contained inside 'pre', 'code', 'a' and 'style' elements
+  internal references link to a valid reference (with adjacent text, a title attribute that is escaped, default classes, data-project and data-snippet attributes, and an :only_path context) and ignore invalid snippet IDs
+  cross-project complete (cross- and same-namespace), shorthand and URL references link with valid text and ignore invalid snippet IDs on the referenced project; group context links to a valid reference
+
+AutocompleteUsersFinder
+  #execute returns the expected users for each filter: nil current_user, project, author_id, group (subgroups include users from parent groups, PENDING: around hook at ./spec/spec_helper.rb:186 did not execute the example), search, skip_users, todo_filter with pending or done todo_state_filter, current_user and author_id
#can_test? + when repository is not empty + returns true + when repository is empty + returns true + #test + when repository is not empty + test runs execute + when repository is empty + test runs execute + Template + .build_from_template + when template is invalid + sets service template to inactive when template is invalid + for pushover service + is prefilled for projects pushover service + has all fields prefilled + {property}_changed? + returns false when the property has not been assigned a new value + returns true when the property has been assigned a different value + returns true when the property has been assigned a different value twice + returns false when the property has been re-assigned the same value + returns false when the property has been assigned a new value then saved + {property}_touched? + returns false when the property has not been assigned a new value + returns true when the property has been assigned a different value + returns true when the property has been assigned a different value twice + returns true when the property has been re-assigned the same value + returns false when the property has been assigned a new value then saved + {property}_was + returns nil when the property has not been assigned a new value + returns the previous value when the property has been assigned a different value + returns initial value when the property has been re-assigned the same value + returns initial value when the property has been assigned multiple values + returns nil when the property has been assigned a new value then saved + initialize service with no properties + does not raise error + creates the properties + callbacks + on create + updates the has_external_issue_tracker boolean + on update + updates the has_external_issue_tracker boolean + #deprecated? 
+ should return false by default + #deprecation_message + should be empty by default + .find_by_template + returns service template + #api_field_names + filters out sensitive fields + +TestHooks::ProjectService + #execute + hook with not implemented test + returns error message + push_events + returns error message if not enough data + executes hook + tag_push_events + returns error message if not enough data + executes hook + note_events + returns error message if not enough data + executes hook + issues_events + returns error message if not enough data + executes hook + confidential_issues_events + returns error message if not enough data + executes hook + merge_requests_events + returns error message if not enough data + executes hook + job_events + returns error message if not enough data + executes hook + pipeline_events + returns error message if not enough data + executes hook + wiki_page_events + returns error message if wiki disabled + returns error message if not enough data + executes hook + +User views an open merge request + when a merge request does not have repository + renders both the title and the description + when a merge request has repository + when rendering description preview + renders empty description preview + renders description preview + when the branch is rebased on the target + does not show diverged commits count + when the branch is diverged on the target + shows diverged commits count + +RunnerJobsFinder + #execute + when params is empty + returns all jobs assigned to Runner + when params contains status + when status is created + returns matched job + when status is pending + returns matched job + when status is running + returns matched job + when status is success + returns matched job + when status is failed + returns matched job + when status is canceled + returns matched job + when status is skipped + returns matched job + when status is manual + returns matched job + +Project snippets + when the project has snippets + pagination + behaves like paginated snippets + is limited to 20 items per page + clicking on the link to the second page + shows the remaining snippets + list content + contains all project snippets + when submitting a note + should have autocomplete + should have zen mode + +API::V3::Environments + GET /projects/:id/environments + as member of the project + returns project environments + behaves like a paginated resources + has pagination headers + as non member + returns a 404 status code + POST /projects/:id/environments + as a member + creates a environment with valid params + requires name to be passed + returns a 400 if environment already exists + returns a 400 if slug is specified + a non member + rejects the request + returns a 400 when the required params are missing + PUT /projects/:id/environments/:environment_id + returns a 200 if name and external_url are changed + won't allow slug to be changed + won't update the external_url if only the name is passed + returns a 404 if the environment does not exist + DELETE /projects/:id/environments/:environment_id + as a master + returns a 200 for an existing environment + returns a 404 for non existing id + a non member + rejects the request + +API::Namespaces + GET /namespaces + when unauthenticated + returns authentication error + when authenticated as admin + returns correct attributes + admin: returns an array of all namespaces + admin: returns an array of matched namespaces + when authenticated as a regular user + returns correct attributes when user can admin group + returns 
correct attributes when user cannot admin group + user: returns an array of namespaces + admin: returns an array of matched namespaces + GET /namespaces/:id + when unauthenticated + returns authentication error + when authenticated as regular user + when requested namespace is not owned by user + when requesting group + returns not-found + when requesting personal namespace + returns not-found + when requested namespace is owned by user + behaves like namespace reader + when namespace exists + when requested by ID + when requesting group + behaves like can access namespace + returns namespace details + when requesting personal namespace + behaves like can access namespace + returns namespace details + when requested by path + when requesting group + behaves like can access namespace + returns namespace details + when requesting personal namespace + behaves like can access namespace + returns namespace details + when namespace doesn't exist + returns not-found + when authenticated as admin + when requested namespace is not owned by user + when requesting group + behaves like can access namespace + returns namespace details + when requesting personal namespace + behaves like can access namespace + returns namespace details + when requested namespace is owned by user + behaves like namespace reader + when namespace exists + when requested by ID + when requesting group + behaves like can access namespace + returns namespace details + when requesting personal namespace + behaves like can access namespace + returns namespace details + when requested by path + when requesting group + behaves like can access namespace + returns namespace details + when requesting personal namespace + behaves like can access namespace + returns namespace details + when namespace doesn't exist + returns not-found + +MergeRequests::GetUrlsService + #execute + pushing to default branch + behaves like no_merge_request_url + returns no URL + pushing to project with MRs disabled + behaves like no_merge_request_url + returns no URL + pushing one completely new branch + behaves like new_merge_request_link + returns url to create new merge request + pushing to existing branch but no merge request + behaves like new_merge_request_link + returns url to create new merge request + pushing to deleted branch + behaves like no_merge_request_url + returns no URL + pushing to existing branch and merge request opened + behaves like show_merge_request_url + returns url to view merge request + pushing to existing branch and merge request is reopened + behaves like show_merge_request_url + returns url to view merge request + pushing to existing branch from forked project + behaves like show_merge_request_url + returns url to view merge request + pushing to existing branch and merge request is closed + behaves like new_merge_request_link + returns url to create new merge request + pushing to existing branch and merge request is merged + behaves like new_merge_request_link + returns url to create new merge request + pushing new branch and existing branch (with merge request created) at once + returns 2 urls for both creating new and showing merge request + when printing_merge_request_link_enabled is false + returns empty array + +LfsFileLock + should belong to project + should belong to user + should validate that :project_id cannot be empty/falsy + should validate that :user_id cannot be empty/falsy + should validate that :path cannot be empty/falsy + #can_be_unlocked_by? 
+ when it's forced + can be unlocked by the author + can be unlocked by a master + can't be unlocked by other user + when it isn't forced + can be unlocked by the author + can't be unlocked by a master + can't be unlocked by other user + +Gitlab::Ci::Config::Entry::Boolean + validations + when entry config value is valid + #value + returns key value + #valid? + is valid + when entry value is not valid + #errors + saves errors +Knapsack report was generated. Preview: +{ + "spec/services/todo_service_spec.rb": 53.71851348876953, + "spec/lib/gitlab/import_export/project_tree_saver_spec.rb": 48.39624857902527, + "spec/lib/gitlab/background_migration/deserialize_merge_request_diffs_and_commits_spec.rb": 35.17360734939575, + "spec/controllers/projects/merge_requests_controller_spec.rb": 25.50887441635132, + "spec/controllers/groups_controller_spec.rb": 13.007296323776245, + "spec/features/projects/import_export/import_file_spec.rb": 16.827879428863525, + "spec/lib/gitlab/middleware/go_spec.rb": 12.497276306152344, + "spec/features/projects/blobs/edit_spec.rb": 11.511932134628296, + "spec/services/boards/lists/move_service_spec.rb": 8.695446491241455, + "spec/services/create_deployment_service_spec.rb": 6.754847526550293, + "spec/controllers/groups/milestones_controller_spec.rb": 6.8740551471710205, + "spec/helpers/groups_helper_spec.rb": 0.9002459049224854, + "spec/requests/api/v3/todos_spec.rb": 6.5924904346466064, + "spec/models/project_services/teamcity_service_spec.rb": 2.9881808757781982, + "spec/lib/gitlab/conflict/file_spec.rb": 5.294132709503174, + "spec/lib/banzai/filter/snippet_reference_filter_spec.rb": 4.118850469589233, + "spec/finders/autocomplete_users_finder_spec.rb": 3.864232063293457, + "spec/models/service_spec.rb": 3.1697962284088135, + "spec/services/test_hooks/project_service_spec.rb": 4.167759656906128, + "spec/features/projects/merge_requests/user_views_open_merge_request_spec.rb": 4.707003355026245, + "spec/finders/runner_jobs_finder_spec.rb": 3.2137575149536133, + "spec/features/projects/snippets_spec.rb": 3.631467580795288, + "spec/requests/api/v3/environments_spec.rb": 2.314746856689453, + "spec/requests/api/namespaces_spec.rb": 2.352935314178467, + "spec/services/merge_requests/get_urls_service_spec.rb": 2.8039824962615967, + "spec/models/lfs_file_lock_spec.rb": 0.7295050621032715, + "spec/lib/gitlab/ci/config/entry/boolean_spec.rb": 0.007024049758911133 +} + +Knapsack global time execution for tests: 04m 49s + +Pending: (Failures listed here are expected and do not affect your suite's status) + + 1) GroupsController GET #new when creating subgroups and can_create_group is true and logged in as Admin behaves like member with ability to create subgroups renders the new page + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/controllers/groups_controller_spec.rb:15 + + 2) GroupsController GET #new when creating subgroups and can_create_group is true and logged in as Owner behaves like member with ability to create subgroups renders the new page + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/controllers/groups_controller_spec.rb:15 + + 3) GroupsController GET #new when creating subgroups and can_create_group is true and logged in as Guest behaves like member without ability to create subgroups renders the 404 page + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/controllers/groups_controller_spec.rb:25 + + 4) GroupsController GET #new when creating subgroups and 
can_create_group is true and logged in as Developer behaves like member without ability to create subgroups renders the 404 page + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/controllers/groups_controller_spec.rb:25 + + 5) GroupsController GET #new when creating subgroups and can_create_group is true and logged in as Master behaves like member without ability to create subgroups renders the 404 page + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/controllers/groups_controller_spec.rb:25 + + 6) GroupsController GET #new when creating subgroups and can_create_group is false and logged in as Admin behaves like member with ability to create subgroups renders the new page + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/controllers/groups_controller_spec.rb:15 + + 7) GroupsController GET #new when creating subgroups and can_create_group is false and logged in as Owner behaves like member with ability to create subgroups renders the new page + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/controllers/groups_controller_spec.rb:15 + + 8) GroupsController GET #new when creating subgroups and can_create_group is false and logged in as Guest behaves like member without ability to create subgroups renders the 404 page + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/controllers/groups_controller_spec.rb:25 + + 9) GroupsController GET #new when creating subgroups and can_create_group is false and logged in as Developer behaves like member without ability to create subgroups renders the 404 page + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/controllers/groups_controller_spec.rb:25 + + 10) GroupsController GET #new when creating subgroups and can_create_group is false and logged in as Master behaves like member without ability to create subgroups renders the 404 page + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/controllers/groups_controller_spec.rb:25 + + 11) GroupsController POST #create when creating subgroups and can_create_group is true and logged in as Owner creates the subgroup + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/controllers/groups_controller_spec.rb:117 + + 12) GroupsController POST #create when creating subgroups and can_create_group is true and logged in as Developer renders the new template + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/controllers/groups_controller_spec.rb:129 + + 13) GroupsController POST #create when creating subgroups and can_create_group is false and logged in as Owner creates the subgroup + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/controllers/groups_controller_spec.rb:117 + + 14) GroupsController POST #create when creating subgroups and can_create_group is false and logged in as Developer renders the new template + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/controllers/groups_controller_spec.rb:129 + + 15) GroupsController PUT transfer when transfering to a subgroup goes right should return a notice + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/controllers/groups_controller_spec.rb:516 + + 16) GroupsController PUT transfer when transfering to a subgroup goes right should redirect to the new path + # around hook at 
./spec/spec_helper.rb:190 did not execute the example + # ./spec/controllers/groups_controller_spec.rb:520 + + 17) GroupsController PUT transfer when converting to a root group goes right should return a notice + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/controllers/groups_controller_spec.rb:535 + + 18) GroupsController PUT transfer when converting to a root group goes right should redirect to the new path + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/controllers/groups_controller_spec.rb:539 + + 19) GroupsController PUT transfer When the transfer goes wrong should return an alert + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/controllers/groups_controller_spec.rb:557 + + 20) GroupsController PUT transfer When the transfer goes wrong should redirect to the current path + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/controllers/groups_controller_spec.rb:561 + + 21) GroupsController PUT transfer when the user is not allowed to transfer the group should be denied + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/controllers/groups_controller_spec.rb:577 + + 22) Groups::TransferService#execute when transforming a group into a root group behaves like ensuring allowed transfer for a group with other database than PostgreSQL should return false + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:15 + + 23) Groups::TransferService#execute when transforming a group into a root group behaves like ensuring allowed transfer for a group with other database than PostgreSQL should add an error on group + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:19 + + 24) Groups::TransferService#execute when transforming a group into a root group behaves like ensuring allowed transfer for a group when there's an exception on Gitlab shell directories should return false + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:33 + + 25) Groups::TransferService#execute when transforming a group into a root group behaves like ensuring allowed transfer for a group when there's an exception on Gitlab shell directories should add an error on group + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:37 + + 26) Groups::TransferService#execute when transforming a group into a root group when the group is already a root group should add an error on group + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:53 + + 27) Groups::TransferService#execute when transforming a group into a root group when the user does not have the right policies should return false + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:62 + + 28) Groups::TransferService#execute when transforming a group into a root group when the user does not have the right policies should add an error on group + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:66 + + 29) Groups::TransferService#execute when transforming a group into a root group when there is a group with the same path 
should return false + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:79 + + 30) Groups::TransferService#execute when transforming a group into a root group when there is a group with the same path should add an error on group + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:83 + + 31) Groups::TransferService#execute when transforming a group into a root group when the group is a subgroup and the transfer is valid should update group attributes + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:99 + + 32) Groups::TransferService#execute when transforming a group into a root group when the group is a subgroup and the transfer is valid should update group children path + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:103 + + 33) Groups::TransferService#execute when transforming a group into a root group when the group is a subgroup and the transfer is valid should update group projects path + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:109 + + 34) Groups::TransferService#execute when transferring a subgroup into another group behaves like ensuring allowed transfer for a group with other database than PostgreSQL should return false + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:15 + + 35) Groups::TransferService#execute when transferring a subgroup into another group behaves like ensuring allowed transfer for a group with other database than PostgreSQL should add an error on group + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:19 + + 36) Groups::TransferService#execute when transferring a subgroup into another group behaves like ensuring allowed transfer for a group when there's an exception on Gitlab shell directories should return false + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:33 + + 37) Groups::TransferService#execute when transferring a subgroup into another group behaves like ensuring allowed transfer for a group when there's an exception on Gitlab shell directories should add an error on group + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:37 + + 38) Groups::TransferService#execute when transferring a subgroup into another group when the new parent group is the same as the previous parent group should return false + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:125 + + 39) Groups::TransferService#execute when transferring a subgroup into another group when the new parent group is the same as the previous parent group should add an error on group + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:129 + + 40) Groups::TransferService#execute when transferring a subgroup into another group when the user does not have the right policies should return false + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # 
./spec/services/groups/transfer_service_spec.rb:138 + + 41) Groups::TransferService#execute when transferring a subgroup into another group when the user does not have the right policies should add an error on group + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:142 + + 42) Groups::TransferService#execute when transferring a subgroup into another group when the parent has a group with the same path should return false + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:155 + + 43) Groups::TransferService#execute when transferring a subgroup into another group when the parent has a group with the same path should add an error on group + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:159 + + 44) Groups::TransferService#execute when transferring a subgroup into another group when the parent group has a project with the same path should return false + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:174 + + 45) Groups::TransferService#execute when transferring a subgroup into another group when the parent group has a project with the same path should add an error on group + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:178 + + 46) Groups::TransferService#execute when transferring a subgroup into another group when the group is allowed to be transferred should update visibility for the group based on the parent group + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:212 + + 47) Groups::TransferService#execute when transferring a subgroup into another group when the group is allowed to be transferred should update parent group to the new parent + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:216 + + 48) Groups::TransferService#execute when transferring a subgroup into another group when the group is allowed to be transferred should return the group as children of the new parent + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:220 + + 49) Groups::TransferService#execute when transferring a subgroup into another group when the group is allowed to be transferred should create a redirect for the group + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:225 + + 50) Groups::TransferService#execute when transferring a subgroup into another group when the group is allowed to be transferred when the group has a lower visibility than the parent group should not update the visibility for the group + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:194 + + 51) Groups::TransferService#execute when transferring a subgroup into another group when the group is allowed to be transferred when the group has a higher visibility than the parent group should update visibility level based on the parent group + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:205 + + 52) Groups::TransferService#execute when 
transferring a subgroup into another group when transferring a group with group descendants should update subgroups path + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:239 + + 53) Groups::TransferService#execute when transferring a subgroup into another group when transferring a group with group descendants should create redirects for the subgroups + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:246 + + 54) Groups::TransferService#execute when transferring a subgroup into another group when transferring a group with group descendants when the new parent has a higher visibility than the children should not update the children visibility + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:253 + + 55) Groups::TransferService#execute when transferring a subgroup into another group when transferring a group with group descendants when the new parent has a lower visibility than the children should update children visibility to match the new parent + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:264 + + 56) Groups::TransferService#execute when transferring a subgroup into another group when transferring a group with project descendants should update projects path + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:282 + + 57) Groups::TransferService#execute when transferring a subgroup into another group when transferring a group with project descendants should create permanent redirects for the projects + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:289 + + 58) Groups::TransferService#execute when transferring a subgroup into another group when transferring a group with project descendants when the new parent has a higher visibility than the projects should not update projects visibility + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:296 + + 59) Groups::TransferService#execute when transferring a subgroup into another group when transferring a group with project descendants when the new parent has a lower visibility than the projects should update projects visibility to match the new parent + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:307 + + 60) Groups::TransferService#execute when transferring a subgroup into another group when transferring a group with subgroups & projects descendants should update subgroups path + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:327 + + 61) Groups::TransferService#execute when transferring a subgroup into another group when transferring a group with subgroups & projects descendants should update projects path + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:334 + + 62) Groups::TransferService#execute when transferring a subgroup into another group when transferring a group with subgroups & projects descendants should create redirect for the subgroups and projects + # around hook at ./spec/spec_helper.rb:190 
did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:341 + + 63) Groups::TransferService#execute when transferring a subgroup into another group when transfering a group with nested groups and projects should update subgroups path + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:363 + + 64) Groups::TransferService#execute when transferring a subgroup into another group when transfering a group with nested groups and projects should update projects path + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:375 + + 65) Groups::TransferService#execute when transferring a subgroup into another group when transfering a group with nested groups and projects should create redirect for the subgroups and projects + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:383 + + 66) Groups::TransferService#execute when transferring a subgroup into another group when updating the group goes wrong should restore group and projects visibility + # around hook at ./spec/spec_helper.rb:190 did not execute the example + # ./spec/services/groups/transfer_service_spec.rb:405 + + 67) GroupsHelper group_title outputs the groups in the correct order + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/helpers/groups_helper_spec.rb:106 + + 68) GroupsHelper#share_with_group_lock_help_text root_share_with_group_locked: false, subgroup_share_with_group_locked: false, sub_subgroup_share_with_group_locked: false, current_user: :root_owner, help_text: :default_help, linked_ancestor: nil has the correct help text with correct ancestor links + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/helpers/groups_helper_spec.rb:198 + + 69) GroupsHelper#share_with_group_lock_help_text root_share_with_group_locked: false, subgroup_share_with_group_locked: false, sub_subgroup_share_with_group_locked: false, current_user: :sub_owner, help_text: :default_help, linked_ancestor: nil has the correct help text with correct ancestor links + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/helpers/groups_helper_spec.rb:198 + + 70) GroupsHelper#share_with_group_lock_help_text root_share_with_group_locked: false, subgroup_share_with_group_locked: false, sub_subgroup_share_with_group_locked: false, current_user: :sub_sub_owner, help_text: :default_help, linked_ancestor: nil has the correct help text with correct ancestor links + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/helpers/groups_helper_spec.rb:198 + + 71) GroupsHelper#share_with_group_lock_help_text root_share_with_group_locked: false, subgroup_share_with_group_locked: false, sub_subgroup_share_with_group_locked: true, current_user: :root_owner, help_text: :default_help, linked_ancestor: nil has the correct help text with correct ancestor links + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/helpers/groups_helper_spec.rb:198 + + 72) GroupsHelper#share_with_group_lock_help_text root_share_with_group_locked: false, subgroup_share_with_group_locked: false, sub_subgroup_share_with_group_locked: true, current_user: :sub_owner, help_text: :default_help, linked_ancestor: nil has the correct help text with correct ancestor links + # around hook at ./spec/spec_helper.rb:186 did not 
execute the example + # ./spec/helpers/groups_helper_spec.rb:198 + + 73) GroupsHelper#share_with_group_lock_help_text root_share_with_group_locked: false, subgroup_share_with_group_locked: false, sub_subgroup_share_with_group_locked: true, current_user: :sub_sub_owner, help_text: :default_help, linked_ancestor: nil has the correct help text with correct ancestor links + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/helpers/groups_helper_spec.rb:198 + + 74) GroupsHelper#share_with_group_lock_help_text root_share_with_group_locked: false, subgroup_share_with_group_locked: true, sub_subgroup_share_with_group_locked: false, current_user: :root_owner, help_text: :ancestor_locked_and_has_been_overridden, linked_ancestor: :subgroup has the correct help text with correct ancestor links + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/helpers/groups_helper_spec.rb:198 + + 75) GroupsHelper#share_with_group_lock_help_text root_share_with_group_locked: false, subgroup_share_with_group_locked: true, sub_subgroup_share_with_group_locked: false, current_user: :sub_owner, help_text: :ancestor_locked_and_has_been_overridden, linked_ancestor: :subgroup has the correct help text with correct ancestor links + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/helpers/groups_helper_spec.rb:198 + + 76) GroupsHelper#share_with_group_lock_help_text root_share_with_group_locked: false, subgroup_share_with_group_locked: true, sub_subgroup_share_with_group_locked: false, current_user: :sub_sub_owner, help_text: :ancestor_locked_and_has_been_overridden, linked_ancestor: :subgroup has the correct help text with correct ancestor links + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/helpers/groups_helper_spec.rb:198 + + 77) GroupsHelper#share_with_group_lock_help_text root_share_with_group_locked: false, subgroup_share_with_group_locked: true, sub_subgroup_share_with_group_locked: true, current_user: :root_owner, help_text: :ancestor_locked_but_you_can_override, linked_ancestor: :subgroup has the correct help text with correct ancestor links + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/helpers/groups_helper_spec.rb:198 + + 78) GroupsHelper#share_with_group_lock_help_text root_share_with_group_locked: false, subgroup_share_with_group_locked: true, sub_subgroup_share_with_group_locked: true, current_user: :sub_owner, help_text: :ancestor_locked_but_you_can_override, linked_ancestor: :subgroup has the correct help text with correct ancestor links + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/helpers/groups_helper_spec.rb:198 + + 79) GroupsHelper#share_with_group_lock_help_text root_share_with_group_locked: false, subgroup_share_with_group_locked: true, sub_subgroup_share_with_group_locked: true, current_user: :sub_sub_owner, help_text: :ancestor_locked_so_ask_the_owner, linked_ancestor: :subgroup has the correct help text with correct ancestor links + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/helpers/groups_helper_spec.rb:198 + + 80) GroupsHelper#share_with_group_lock_help_text root_share_with_group_locked: true, subgroup_share_with_group_locked: false, sub_subgroup_share_with_group_locked: false, current_user: :root_owner, help_text: :default_help, linked_ancestor: nil has the correct help text with correct ancestor links + # around hook at ./spec/spec_helper.rb:186 did 
not execute the example + # ./spec/helpers/groups_helper_spec.rb:198 + + 81) GroupsHelper#share_with_group_lock_help_text root_share_with_group_locked: true, subgroup_share_with_group_locked: false, sub_subgroup_share_with_group_locked: false, current_user: :sub_owner, help_text: :default_help, linked_ancestor: nil has the correct help text with correct ancestor links + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/helpers/groups_helper_spec.rb:198 + + 82) GroupsHelper#share_with_group_lock_help_text root_share_with_group_locked: true, subgroup_share_with_group_locked: false, sub_subgroup_share_with_group_locked: false, current_user: :sub_sub_owner, help_text: :default_help, linked_ancestor: nil has the correct help text with correct ancestor links + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/helpers/groups_helper_spec.rb:198 + + 83) GroupsHelper#share_with_group_lock_help_text root_share_with_group_locked: true, subgroup_share_with_group_locked: false, sub_subgroup_share_with_group_locked: true, current_user: :root_owner, help_text: :default_help, linked_ancestor: nil has the correct help text with correct ancestor links + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/helpers/groups_helper_spec.rb:198 + + 84) GroupsHelper#share_with_group_lock_help_text root_share_with_group_locked: true, subgroup_share_with_group_locked: false, sub_subgroup_share_with_group_locked: true, current_user: :sub_owner, help_text: :default_help, linked_ancestor: nil has the correct help text with correct ancestor links + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/helpers/groups_helper_spec.rb:198 + + 85) GroupsHelper#share_with_group_lock_help_text root_share_with_group_locked: true, subgroup_share_with_group_locked: false, sub_subgroup_share_with_group_locked: true, current_user: :sub_sub_owner, help_text: :default_help, linked_ancestor: nil has the correct help text with correct ancestor links + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/helpers/groups_helper_spec.rb:198 + + 86) GroupsHelper#share_with_group_lock_help_text root_share_with_group_locked: true, subgroup_share_with_group_locked: true, sub_subgroup_share_with_group_locked: false, current_user: :root_owner, help_text: :ancestor_locked_and_has_been_overridden, linked_ancestor: :root_group has the correct help text with correct ancestor links + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/helpers/groups_helper_spec.rb:198 + + 87) GroupsHelper#share_with_group_lock_help_text root_share_with_group_locked: true, subgroup_share_with_group_locked: true, sub_subgroup_share_with_group_locked: false, current_user: :sub_owner, help_text: :ancestor_locked_and_has_been_overridden, linked_ancestor: :root_group has the correct help text with correct ancestor links + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/helpers/groups_helper_spec.rb:198 + + 88) GroupsHelper#share_with_group_lock_help_text root_share_with_group_locked: true, subgroup_share_with_group_locked: true, sub_subgroup_share_with_group_locked: false, current_user: :sub_sub_owner, help_text: :ancestor_locked_and_has_been_overridden, linked_ancestor: :root_group has the correct help text with correct ancestor links + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/helpers/groups_helper_spec.rb:198 + + 89) 
GroupsHelper#share_with_group_lock_help_text root_share_with_group_locked: true, subgroup_share_with_group_locked: true, sub_subgroup_share_with_group_locked: true, current_user: :root_owner, help_text: :ancestor_locked_but_you_can_override, linked_ancestor: :root_group has the correct help text with correct ancestor links + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/helpers/groups_helper_spec.rb:198 + + 90) GroupsHelper#share_with_group_lock_help_text root_share_with_group_locked: true, subgroup_share_with_group_locked: true, sub_subgroup_share_with_group_locked: true, current_user: :sub_owner, help_text: :ancestor_locked_so_ask_the_owner, linked_ancestor: :root_group has the correct help text with correct ancestor links + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/helpers/groups_helper_spec.rb:198 + + 91) GroupsHelper#share_with_group_lock_help_text root_share_with_group_locked: true, subgroup_share_with_group_locked: true, sub_subgroup_share_with_group_locked: true, current_user: :sub_sub_owner, help_text: :ancestor_locked_so_ask_the_owner, linked_ancestor: :root_group has the correct help text with correct ancestor links + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/helpers/groups_helper_spec.rb:198 + + 92) AutocompleteUsersFinder#execute when passed a subgroup includes users from parent groups as well + # around hook at ./spec/spec_helper.rb:186 did not execute the example + # ./spec/finders/autocomplete_users_finder_spec.rb:55 + +Finished in 5 minutes 7 seconds (files took 16.6 seconds to load) +819 examples, 0 failures, 92 pending + +section_end:1522927514:build_script section_start:1522927514:after_script Running after script... +$ date +Thu Apr 5 11:25:14 UTC 2018 +section_end:1522927515:after_script section_start:1522927515:archive_cache Not uploading cache ruby-2.3.6-with-yarn due to policy +section_end:1522927516:archive_cache section_start:1522927516:upload_artifacts Uploading artifacts... +coverage/: found 5 matching files  +knapsack/: found 5 matching files  +rspec_flaky/: found 4 matching files  +WARNING: tmp/capybara/: no matching files  +Uploading artifacts to coordinator... 
ok  id=61303283 responseStatus=201 Created token=rusBKvxM +section_end:1522927520:upload_artifacts Job succeeded  \ No newline at end of file diff --git a/spec/models/ci/job_trace_chunk_spec.rb b/spec/models/ci/job_trace_chunk_spec.rb index d3fbec9170b1..6ad4b321c3fa 100644 --- a/spec/models/ci/job_trace_chunk_spec.rb +++ b/spec/models/ci/job_trace_chunk_spec.rb @@ -70,7 +70,7 @@ let(:value) { 'a' * described_class::CHUNK_SIZE } it 'schedules stashing data' do - expect(StashTraceChunkWorker).to receive(:perform_async).once + expect(SwapTraceChunkWorker).to receive(:perform_async).once subject end @@ -107,7 +107,7 @@ context 'when fullfilled chunk size' do it 'does not schedule stashing data' do - expect(StashTraceChunkWorker).not_to receive(:perform_async) + expect(SwapTraceChunkWorker).not_to receive(:perform_async) subject end -- GitLab From b94c84e597c02a6c38cb8f5233c77dc0b15a7c4e Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Thu, 5 Apr 2018 23:43:21 +0900 Subject: [PATCH 37/86] Add spec for ChunkedIO --- lib/gitlab/ci/trace/chunked_io.rb | 37 +- spec/lib/gitlab/ci/trace/chunked_io_spec.rb | 393 ++++++++++++++++++ spec/models/ci/job_trace_chunk_spec.rb | 1 + spec/support/chunked_io/chunked_io_helpers.rb | 11 + 4 files changed, 426 insertions(+), 16 deletions(-) create mode 100644 spec/lib/gitlab/ci/trace/chunked_io_spec.rb create mode 100644 spec/support/chunked_io/chunked_io_helpers.rb diff --git a/lib/gitlab/ci/trace/chunked_io.rb b/lib/gitlab/ci/trace/chunked_io.rb index 653c92358ad5..7aef2ca56eb9 100644 --- a/lib/gitlab/ci/trace/chunked_io.rb +++ b/lib/gitlab/ci/trace/chunked_io.rb @@ -35,14 +35,6 @@ def binmode? true end - def path - nil - end - - def url - nil - end - def seek(pos, where = IO::SEEK_SET) new_pos = case where @@ -74,18 +66,21 @@ def each_line end end - def read(length = (size - tell), outbuf = "") + def read(length = nil, outbuf = "") out = "" - end_tell = [tell + length, size].min - until end_tell <= tell + length = size - tell unless length + + until length <= 0 || eof? data = chunk_slice_from_offset break if data.empty? - data = data[0, (length % CHUNK_SIZE)] if data.bytesize + tell >= end_tell + chunk_bytes = [CHUNK_SIZE - chunk_offset, length].min + chunk_data = data.byteslice(0, chunk_bytes) - out << data - @tell += data.bytesize + out << chunk_data + @tell += chunk_data.bytesize + length -= chunk_data.bytesize end # If outbuf is passed, we put the output into the buffer. This supports IO.copy_stream functionality @@ -118,7 +113,10 @@ def readline end def write(data) + raise 'Could not write empty data' unless data.present? + start_pos = tell + data = data.force_encoding(Encoding::BINARY) while tell < start_pos + data.bytesize # get slice from current offset till the end where it falls into chunk @@ -129,9 +127,13 @@ def write(data) ensure_chunk.append(chunk_data, chunk_offset) # move offsets within buffer - @tell += chunk_bytes + @tell += chunk_data.bytesize @size = [size, tell].max end + + tell - start_pos + ensure + invalidate_chunk_cache end def truncate(offset) @@ -139,13 +141,14 @@ def truncate(offset) @tell = offset @size = offset - invalidate_chunk_cache # remove all next chunks job_chunks.where('chunk_index > ?', chunk_index).destroy_all # truncate current chunk current_chunk.truncate(chunk_offset) if chunk_offset != 0 + ensure + invalidate_chunk_cache end def flush @@ -158,6 +161,8 @@ def present? def destroy! 
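+      # Remove every persisted chunk for this job, then reset pos/size so the
+      # stream reads back as empty; the chunk cache is invalidated either way.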
      job_chunks.destroy_all
+     @tell = @size = 0
+   ensure
      invalidate_chunk_cache
    end
diff --git a/spec/lib/gitlab/ci/trace/chunked_io_spec.rb b/spec/lib/gitlab/ci/trace/chunked_io_spec.rb
new file mode 100644
index 000000000000..97781faecd30
--- /dev/null
+++ b/spec/lib/gitlab/ci/trace/chunked_io_spec.rb
@@ -0,0 +1,393 @@
+require 'spec_helper'
+
+describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
+  include ChunkedIOHelpers
+
+  set(:job) { create(:ci_build, :running) }
+  let(:chunked_io) { described_class.new(job) }
+
+  before do
+    stub_feature_flags(ci_enable_live_trace: true)
+  end
+
+  context "#initialize" do
+    context 'when a chunk exists' do
+      before do
+        job.trace.set('ABC')
+      end
+
+      it { expect(chunked_io.size).to eq(3) }
+    end
+
+    context 'when two chunks exist' do
+      before do
+        stub_buffer_size(4)
+        job.trace.set('ABCDEF')
+      end
+
+      it { expect(chunked_io.size).to eq(6) }
+    end
+
+    context 'when no chunks exist' do
+      it { expect(chunked_io.size).to eq(0) }
+    end
+  end
+
+  context "#seek" do
+    subject { chunked_io.seek(pos, where) }
+
+    before do
+      job.trace.set(sample_trace_raw)
+    end
+
+    context 'when moving pos to the end of the file' do
+      let(:pos) { 0 }
+      let(:where) { IO::SEEK_END }
+
+      it { is_expected.to eq(sample_trace_raw.bytesize) }
+    end
+
+    context 'when moving pos to the middle of the file' do
+      let(:pos) { sample_trace_raw.bytesize / 2 }
+      let(:where) { IO::SEEK_SET }
+
+      it { is_expected.to eq(pos) }
+    end
+
+    context 'when moving pos around' do
+      it 'matches the result' do
+        expect(chunked_io.seek(0)).to eq(0)
+        expect(chunked_io.seek(100, IO::SEEK_CUR)).to eq(100)
+        expect { chunked_io.seek(sample_trace_raw.bytesize + 1, IO::SEEK_CUR) }
+          .to raise_error('new position is outside of file')
+      end
+    end
+  end
+
+  context "#eof?" do
+    subject { chunked_io.eof?
} + + before do + job.trace.set(sample_trace_raw) + end + + context 'when current pos is at end of the file' do + before do + chunked_io.seek(sample_trace_raw.bytesize, IO::SEEK_SET) + end + + it { is_expected.to be_truthy } + end + + context 'when current pos is not at end of the file' do + before do + chunked_io.seek(0, IO::SEEK_SET) + end + + it { is_expected.to be_falsey } + end + end + + context "#each_line" do + let(:string_io) { StringIO.new(sample_trace_raw) } + + context 'when buffer size is smaller than file size' do + before do + stub_buffer_size(sample_trace_raw.bytesize / 2) + job.trace.set(sample_trace_raw) + end + + it 'yields lines' do + expect { |b| chunked_io.each_line(&b) } + .to yield_successive_args(*string_io.each_line.to_a) + end + end + + context 'when buffer size is larger than file size' do + before do + stub_buffer_size(sample_trace_raw.bytesize * 2) + job.trace.set(sample_trace_raw) + end + + it 'calls get_chunk only once' do + expect_any_instance_of(Gitlab::Ci::Trace::ChunkedIO) + .to receive(:current_chunk).once.and_call_original + + chunked_io.each_line { |line| } + end + end + end + + context "#read" do + subject { chunked_io.read(length) } + + context 'when read the whole size' do + let(:length) { nil } + + context 'when buffer size is smaller than file size' do + before do + stub_buffer_size(sample_trace_raw.bytesize / 2) + job.trace.set(sample_trace_raw) + end + + it { is_expected.to eq(sample_trace_raw) } + end + + context 'when buffer size is larger than file size' do + before do + stub_buffer_size(sample_trace_raw.bytesize * 2) + job.trace.set(sample_trace_raw) + end + + it { is_expected.to eq(sample_trace_raw) } + end + end + + context 'when read only first 100 bytes' do + let(:length) { 100 } + + context 'when buffer size is smaller than file size' do + before do + stub_buffer_size(sample_trace_raw.bytesize / 2) + job.trace.set(sample_trace_raw) + end + + it 'reads a trace' do + is_expected.to eq(sample_trace_raw.byteslice(0, length)) + end + end + + context 'when buffer size is larger than file size' do + before do + stub_buffer_size(sample_trace_raw.bytesize * 2) + job.trace.set(sample_trace_raw) + end + + it 'reads a trace' do + is_expected.to eq(sample_trace_raw.byteslice(0, length)) + end + end + end + + context 'when tries to read oversize' do + let(:length) { sample_trace_raw.bytesize + 1000 } + + context 'when buffer size is smaller than file size' do + before do + stub_buffer_size(sample_trace_raw.bytesize / 2) + job.trace.set(sample_trace_raw) + end + + it 'reads a trace' do + is_expected.to eq(sample_trace_raw) + end + end + + context 'when buffer size is larger than file size' do + before do + stub_buffer_size(sample_trace_raw.bytesize * 2) + job.trace.set(sample_trace_raw) + end + + it 'reads a trace' do + is_expected.to eq(sample_trace_raw) + end + end + end + + context 'when tries to read 0 bytes' do + let(:length) { 0 } + + context 'when buffer size is smaller than file size' do + before do + stub_buffer_size(sample_trace_raw.bytesize / 2) + job.trace.set(sample_trace_raw) + end + + it 'reads a trace' do + is_expected.to be_empty + end + end + + context 'when buffer size is larger than file size' do + before do + stub_buffer_size(sample_trace_raw.bytesize * 2) + job.trace.set(sample_trace_raw) + end + + it 'reads a trace' do + is_expected.to be_empty + end + end + end + end + + context "#readline" do + subject { chunked_io.readline } + + let(:string_io) { StringIO.new(sample_trace_raw) } + + shared_examples 'all line matching' do + it 
do + (0...sample_trace_raw.lines.count).each do + expect(chunked_io.readline).to eq(string_io.readline) + end + end + end + + context 'when buffer size is smaller than file size' do + before do + stub_buffer_size(sample_trace_raw.bytesize / 2) + job.trace.set(sample_trace_raw) + end + + it_behaves_like 'all line matching' + end + + context 'when buffer size is larger than file size' do + before do + stub_buffer_size(sample_trace_raw.bytesize * 2) + job.trace.set(sample_trace_raw) + end + + it_behaves_like 'all line matching' + end + + context 'when pos is at middle of the file' do + before do + stub_buffer_size(sample_trace_raw.bytesize / 2) + job.trace.set(sample_trace_raw) + + chunked_io.seek(chunked_io.size / 2) + string_io.seek(string_io.size / 2) + end + + it 'reads from pos' do + expect(chunked_io.readline).to eq(string_io.readline) + end + end + end + + context "#write" do + subject { chunked_io.write(data) } + + let(:data) { sample_trace_raw } + + context 'when data does not exist' do + shared_examples 'writes a trace' do + it do + is_expected.to eq(data.bytesize) + + chunked_io.seek(0, IO::SEEK_SET) + expect(chunked_io.read).to eq(data) + end + end + + context 'when buffer size is smaller than file size' do + before do + stub_buffer_size(data.bytesize / 2) + end + + it_behaves_like 'writes a trace' + end + + context 'when buffer size is larger than file size' do + before do + stub_buffer_size(data.bytesize * 2) + end + + it_behaves_like 'writes a trace' + end + + context 'when data is nil' do + let(:data) { nil } + + it 'writes a trace' do + expect { subject } .to raise_error('Could not write empty data') + end + end + end + + context 'when data already exists' do + let(:exist_data) { 'exist data' } + + shared_examples 'appends a trace' do + it do + chunked_io.seek(0, IO::SEEK_END) + is_expected.to eq(data.bytesize) + + chunked_io.seek(0, IO::SEEK_SET) + expect(chunked_io.read).to eq(exist_data + data) + end + end + + context 'when buffer size is smaller than file size' do + before do + stub_buffer_size(sample_trace_raw.bytesize / 2) + job.trace.set(exist_data) + end + + it_behaves_like 'appends a trace' + end + + context 'when buffer size is larger than file size' do + before do + stub_buffer_size(sample_trace_raw.bytesize * 2) + job.trace.set(exist_data) + end + + it_behaves_like 'appends a trace' + end + end + end + + context "#truncate" do + subject { chunked_io.truncate(offset) } + + let(:offset) { 10 } + + context 'when data does not exist' do + shared_examples 'truncates a trace' do + it do + subject + + chunked_io.seek(0, IO::SEEK_SET) + expect(chunked_io.read).to eq(sample_trace_raw.byteslice(0, offset)) + end + end + + context 'when buffer size is smaller than file size' do + before do + stub_buffer_size(sample_trace_raw.bytesize / 2) + job.trace.set(sample_trace_raw) + end + + it_behaves_like 'truncates a trace' + end + + context 'when buffer size is larger than file size' do + before do + stub_buffer_size(sample_trace_raw.bytesize * 2) + job.trace.set(sample_trace_raw) + end + + it_behaves_like 'truncates a trace' + end + end + end + + context "#destroy!" do + subject { chunked_io.destroy! 
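+      # destroy! must leave no Ci::JobTraceChunk rows behind for this job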
} + + before do + job.trace.set(sample_trace_raw) + end + + it 'deletes' do + expect { subject }.to change { chunked_io.size } + .from(sample_trace_raw.bytesize).to(0) + + expect(Ci::JobTraceChunk.where(job: job).count).to eq(0) + end + end +end diff --git a/spec/models/ci/job_trace_chunk_spec.rb b/spec/models/ci/job_trace_chunk_spec.rb index 6ad4b321c3fa..cb993885fa11 100644 --- a/spec/models/ci/job_trace_chunk_spec.rb +++ b/spec/models/ci/job_trace_chunk_spec.rb @@ -5,6 +5,7 @@ let(:chunk_index) { 0 } let(:data_store) { :redis } let(:raw_data) { nil } + let(:job_trace_chunk) do described_class.new(job: job, chunk_index: chunk_index, data_store: data_store, raw_data: raw_data) end diff --git a/spec/support/chunked_io/chunked_io_helpers.rb b/spec/support/chunked_io/chunked_io_helpers.rb new file mode 100644 index 000000000000..a95eb87d7b87 --- /dev/null +++ b/spec/support/chunked_io/chunked_io_helpers.rb @@ -0,0 +1,11 @@ +module ChunkedIOHelpers + def sample_trace_raw + @sample_trace_raw ||= File.read(expand_fixture_path('trace/sample_trace')) + .force_encoding(Encoding::BINARY) + end + + def stub_buffer_size(size) + stub_const('Ci::JobTraceChunk::CHUNK_SIZE', size) + stub_const('Gitlab::Ci::Trace::ChunkedIO::CHUNK_SIZE', size) + end +end -- GitLab From d42909955554b6f1eae15f744250d8ddf30dbbf9 Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Fri, 6 Apr 2018 00:19:41 +0900 Subject: [PATCH 38/86] Add tests for Trace::Stream --- app/models/ci/job_trace_chunk.rb | 2 +- spec/lib/gitlab/ci/trace/stream_spec.rb | 546 +++++++++++++++--------- 2 files changed, 343 insertions(+), 205 deletions(-) diff --git a/app/models/ci/job_trace_chunk.rb b/app/models/ci/job_trace_chunk.rb index c2f05dd1f039..a68b2a16efba 100644 --- a/app/models/ci/job_trace_chunk.rb +++ b/app/models/ci/job_trace_chunk.rb @@ -24,7 +24,7 @@ def data raw_data else raise 'Unsupported data store' - end + end&.force_encoding(Encoding::BINARY) end def set_data(value) diff --git a/spec/lib/gitlab/ci/trace/stream_spec.rb b/spec/lib/gitlab/ci/trace/stream_spec.rb index e5555546fa8b..8ebe62ba88de 100644 --- a/spec/lib/gitlab/ci/trace/stream_spec.rb +++ b/spec/lib/gitlab/ci/trace/stream_spec.rb @@ -1,6 +1,12 @@ require 'spec_helper' -describe Gitlab::Ci::Trace::Stream do +describe Gitlab::Ci::Trace::Stream, :clean_gitlab_redis_cache do + set(:job) { create(:ci_build, :running) } + + before do + stub_feature_flags(ci_enable_live_trace: true) + end + describe 'delegates' do subject { described_class.new { nil } } @@ -15,333 +21,465 @@ end describe '#limit' do - let(:stream) do - described_class.new do - StringIO.new((1..8).to_a.join("\n")) + shared_examples_for 'limits' do + it 'if size is larger we start from beginning' do + stream.limit(20) + + expect(stream.tell).to eq(0) end - end - it 'if size is larger we start from beginning' do - stream.limit(20) + it 'if size is smaller we start from the end' do + stream.limit(2) - expect(stream.tell).to eq(0) - end + expect(stream.raw).to eq("8") + end - it 'if size is smaller we start from the end' do - stream.limit(2) + context 'when the trace contains ANSI sequence and Unicode' do + let(:stream) do + described_class.new do + File.open(expand_fixture_path('trace/ansi-sequence-and-unicode')) + end + end - expect(stream.raw).to eq("8") - end + it 'forwards to the next linefeed, case 1' do + stream.limit(7) - context 'when the trace contains ANSI sequence and Unicode' do - let(:stream) do - described_class.new do - File.open(expand_fixture_path('trace/ansi-sequence-and-unicode')) + result = 
stream.raw + + expect(result).to eq('') + expect(result.encoding).to eq(Encoding.default_external) end - end - it 'forwards to the next linefeed, case 1' do - stream.limit(7) + it 'forwards to the next linefeed, case 2' do + stream.limit(29) - result = stream.raw + result = stream.raw - expect(result).to eq('') - expect(result.encoding).to eq(Encoding.default_external) - end + expect(result).to eq("\e[01;32m許功蓋\e[0m\n") + expect(result.encoding).to eq(Encoding.default_external) + end - it 'forwards to the next linefeed, case 2' do - stream.limit(29) + # See https://gitlab.com/gitlab-org/gitlab-ce/issues/30796 + it 'reads in binary, output as Encoding.default_external' do + stream.limit(52) - result = stream.raw + result = stream.html - expect(result).to eq("\e[01;32m許功蓋\e[0m\n") - expect(result.encoding).to eq(Encoding.default_external) + expect(result).to eq("ヾ(´༎ຶД༎ຶ`)ノ
許功蓋
") + expect(result.encoding).to eq(Encoding.default_external) + end end + end - # See https://gitlab.com/gitlab-org/gitlab-ce/issues/30796 - it 'reads in binary, output as Encoding.default_external' do - stream.limit(52) + context 'when stream is StringIO' do + let(:stream) do + described_class.new do + StringIO.new((1..8).to_a.join("\n")) + end + end - result = stream.html + it_behaves_like 'limits' + end - expect(result).to eq("ヾ(´༎ຶД༎ຶ`)ノ
許功蓋
") - expect(result.encoding).to eq(Encoding.default_external) + context 'when stream is ChunkedIO' do + let(:stream) do + described_class.new do + Gitlab::Ci::Trace::ChunkedIO.new(job).tap do |chunked_io| + chunked_io.write((1..8).to_a.join("\n")) + chunked_io.seek(0, IO::SEEK_SET) + end + end end + + it_behaves_like 'limits' end end describe '#append' do - let(:tempfile) { Tempfile.new } + shared_examples_for 'appends' do + it "truncates and append content" do + stream.append("89", 4) + stream.seek(0) - let(:stream) do - described_class.new do - tempfile.write("12345678") - tempfile.rewind - tempfile + expect(stream.size).to eq(6) + expect(stream.raw).to eq("123489") end - end - after do - tempfile.unlink - end + it 'appends in binary mode' do + '😺'.force_encoding('ASCII-8BIT').each_char.with_index do |byte, offset| + stream.append(byte, offset) + end - it "truncates and append content" do - stream.append("89", 4) - stream.seek(0) + stream.seek(0) - expect(stream.size).to eq(6) - expect(stream.raw).to eq("123489") + expect(stream.size).to eq(4) + expect(stream.raw).to eq('😺') + end end - it 'appends in binary mode' do - '😺'.force_encoding('ASCII-8BIT').each_char.with_index do |byte, offset| - stream.append(byte, offset) + context 'when stream is StringIO' do + let(:tempfile) { Tempfile.new } + + let(:stream) do + described_class.new do + tempfile.write("12345678") + tempfile.rewind + tempfile + end end - stream.seek(0) + after do + tempfile.unlink + end - expect(stream.size).to eq(4) - expect(stream.raw).to eq('😺') + it_behaves_like 'appends' + end + + context 'when stream is ChunkedIO' do + let(:stream) do + described_class.new do + Gitlab::Ci::Trace::ChunkedIO.new(job).tap do |chunked_io| + chunked_io.write('12345678') + chunked_io.seek(0, IO::SEEK_SET) + end + end + end + + it_behaves_like 'appends' end end describe '#set' do - let(:stream) do - described_class.new do - StringIO.new("12345678") + shared_examples_for 'sets' do + before do + stream.set("8901") + end + + it "overwrite content" do + stream.seek(0) + + expect(stream.size).to eq(4) + expect(stream.raw).to eq("8901") end end - before do - stream.set("8901") + context 'when stream is StringIO' do + let(:stream) do + described_class.new do + StringIO.new("12345678") + end + end + + it_behaves_like 'sets' end - it "overwrite content" do - stream.seek(0) + context 'when stream is ChunkedIO' do + let(:stream) do + described_class.new do + Gitlab::Ci::Trace::ChunkedIO.new(job).tap do |chunked_io| + chunked_io.write('12345678') + chunked_io.seek(0, IO::SEEK_SET) + end + end + end - expect(stream.size).to eq(4) - expect(stream.raw).to eq("8901") + it_behaves_like 'sets' end end describe '#raw' do - let(:path) { __FILE__ } - let(:lines) { File.readlines(path) } - let(:stream) do - described_class.new do - File.open(path) + shared_examples_for 'sets' do + it 'returns all contents if last_lines is not specified' do + result = stream.raw + + expect(result).to eq(lines.join) + expect(result.encoding).to eq(Encoding.default_external) end - end - it 'returns all contents if last_lines is not specified' do - result = stream.raw + context 'limit max lines' do + before do + # specifying BUFFER_SIZE forces to seek backwards + allow(described_class).to receive(:BUFFER_SIZE) + .and_return(2) + end - expect(result).to eq(lines.join) - expect(result.encoding).to eq(Encoding.default_external) - end + it 'returns last few lines' do + result = stream.raw(last_lines: 2) - context 'limit max lines' do - before do - # specifying BUFFER_SIZE forces to 
seek backwards - allow(described_class).to receive(:BUFFER_SIZE) - .and_return(2) - end + expect(result).to eq(lines.last(2).join) + expect(result.encoding).to eq(Encoding.default_external) + end - it 'returns last few lines' do - result = stream.raw(last_lines: 2) + it 'returns everything if trying to get too many lines' do + result = stream.raw(last_lines: lines.size * 2) - expect(result).to eq(lines.last(2).join) - expect(result.encoding).to eq(Encoding.default_external) + expect(result).to eq(lines.join) + expect(result.encoding).to eq(Encoding.default_external) + end end + end - it 'returns everything if trying to get too many lines' do - result = stream.raw(last_lines: lines.size * 2) + let(:path) { __FILE__ } + let(:lines) { File.readlines(path) } - expect(result).to eq(lines.join) - expect(result.encoding).to eq(Encoding.default_external) + context 'when stream is File' do + let(:stream) do + described_class.new do + File.open(path) + end end end + + context 'when stream is ChunkedIO' do + let(:stream) do + described_class.new do + Gitlab::Ci::Trace::ChunkedIO.new(job).tap do |chunked_io| + chunked_io.write(File.binread(path)) + chunked_io.seek(0, IO::SEEK_SET) + end + end + end + + it_behaves_like 'sets' + end end describe '#html_with_state' do - let(:stream) do - described_class.new do - StringIO.new("1234") + shared_examples_for 'html_with_states' do + it 'returns html content with state' do + result = stream.html_with_state + + expect(result.html).to eq("1234") end - end - it 'returns html content with state' do - result = stream.html_with_state + context 'follow-up state' do + let!(:last_result) { stream.html_with_state } - expect(result.html).to eq("1234") - end + before do + stream.append("5678", 4) + stream.seek(0) + end - context 'follow-up state' do - let!(:last_result) { stream.html_with_state } + it "returns appended trace" do + result = stream.html_with_state(last_result.state) - before do - stream.append("5678", 4) - stream.seek(0) + expect(result.append).to be_truthy + expect(result.html).to eq("5678") + end + end + end + + context 'when stream is StringIO' do + let(:stream) do + described_class.new do + StringIO.new("1234") + end end - it "returns appended trace" do - result = stream.html_with_state(last_result.state) + it_behaves_like 'html_with_states' + end - expect(result.append).to be_truthy - expect(result.html).to eq("5678") + context 'when stream is ChunkedIO' do + let(:stream) do + described_class.new do + Gitlab::Ci::Trace::ChunkedIO.new(job).tap do |chunked_io| + chunked_io.write("1234") + chunked_io.seek(0, IO::SEEK_SET) + end + end end + + it_behaves_like 'html_with_states' end end describe '#html' do - let(:stream) do - described_class.new do - StringIO.new("12\n34\n56") + shared_examples_for 'htmls' do + it "returns html" do + expect(stream.html).to eq("12
34
56") + end + + it "returns html for last line only" do + expect(stream.html(last_lines: 1)).to eq("56") end end - it "returns html" do - expect(stream.html).to eq("12
34
56") + context 'when stream is StringIO' do + let(:stream) do + described_class.new do + StringIO.new("12\n34\n56") + end + end + + it_behaves_like 'htmls' end - it "returns html for last line only" do - expect(stream.html(last_lines: 1)).to eq("56") + context 'when stream is ChunkedIO' do + let(:stream) do + described_class.new do + Gitlab::Ci::Trace::ChunkedIO.new(job).tap do |chunked_io| + chunked_io.write("12\n34\n56") + chunked_io.seek(0, IO::SEEK_SET) + end + end + end + + it_behaves_like 'htmls' end end describe '#extract_coverage' do - let(:stream) do - described_class.new do - StringIO.new(data) - end - end + shared_examples_for 'extract_coverages' do + context 'valid content & regex' do + let(:data) { 'Coverage 1033 / 1051 LOC (98.29%) covered' } + let(:regex) { '\(\d+.\d+\%\) covered' } - subject { stream.extract_coverage(regex) } + it { is_expected.to eq("98.29") } + end - context 'valid content & regex' do - let(:data) { 'Coverage 1033 / 1051 LOC (98.29%) covered' } - let(:regex) { '\(\d+.\d+\%\) covered' } + context 'valid content & bad regex' do + let(:data) { 'Coverage 1033 / 1051 LOC (98.29%) covered\n' } + let(:regex) { 'very covered' } - it { is_expected.to eq("98.29") } - end + it { is_expected.to be_nil } + end - context 'valid content & bad regex' do - let(:data) { 'Coverage 1033 / 1051 LOC (98.29%) covered\n' } - let(:regex) { 'very covered' } + context 'no coverage content & regex' do + let(:data) { 'No coverage for today :sad:' } + let(:regex) { '\(\d+.\d+\%\) covered' } - it { is_expected.to be_nil } - end + it { is_expected.to be_nil } + end - context 'no coverage content & regex' do - let(:data) { 'No coverage for today :sad:' } - let(:regex) { '\(\d+.\d+\%\) covered' } + context 'multiple results in content & regex' do + let(:data) do + <<~HEREDOC + (98.39%) covered + (98.29%) covered + HEREDOC + end - it { is_expected.to be_nil } - end + let(:regex) { '\(\d+.\d+\%\) covered' } - context 'multiple results in content & regex' do - let(:data) do - <<~HEREDOC - (98.39%) covered - (98.29%) covered - HEREDOC + it 'returns the last matched coverage' do + is_expected.to eq("98.29") + end end - let(:regex) { '\(\d+.\d+\%\) covered' } + context 'when BUFFER_SIZE is smaller than stream.size' do + let(:data) { 'Coverage 1033 / 1051 LOC (98.29%) covered\n' } + let(:regex) { '\(\d+.\d+\%\) covered' } - it 'returns the last matched coverage' do - is_expected.to eq("98.29") + before do + stub_const('Gitlab::Ci::Trace::Stream::BUFFER_SIZE', 5) + end + + it { is_expected.to eq("98.29") } end - end - context 'when BUFFER_SIZE is smaller than stream.size' do - let(:data) { 'Coverage 1033 / 1051 LOC (98.29%) covered\n' } - let(:regex) { '\(\d+.\d+\%\) covered' } + context 'when regex is multi-byte char' do + let(:data) { '95.0 ゴッドファット\n' } + let(:regex) { '\d+\.\d+ ゴッドファット' } - before do - stub_const('Gitlab::Ci::Trace::Stream::BUFFER_SIZE', 5) + before do + stub_const('Gitlab::Ci::Trace::Stream::BUFFER_SIZE', 5) + end + + it { is_expected.to eq('95.0') } end - it { is_expected.to eq("98.29") } - end + context 'when BUFFER_SIZE is equal to stream.size' do + let(:data) { 'Coverage 1033 / 1051 LOC (98.29%) covered\n' } + let(:regex) { '\(\d+.\d+\%\) covered' } - context 'when regex is multi-byte char' do - let(:data) { '95.0 ゴッドファット\n' } - let(:regex) { '\d+\.\d+ ゴッドファット' } + before do + stub_const('Gitlab::Ci::Trace::Stream::BUFFER_SIZE', data.length) + end - before do - stub_const('Gitlab::Ci::Trace::Stream::BUFFER_SIZE', 5) + it { is_expected.to eq("98.29") } end - it { 
is_expected.to eq('95.0') } - end - - context 'when BUFFER_SIZE is equal to stream.size' do - let(:data) { 'Coverage 1033 / 1051 LOC (98.29%) covered\n' } - let(:regex) { '\(\d+.\d+\%\) covered' } + context 'using a regex capture' do + let(:data) { 'TOTAL 9926 3489 65%' } + let(:regex) { 'TOTAL\s+\d+\s+\d+\s+(\d{1,3}\%)' } - before do - stub_const('Gitlab::Ci::Trace::Stream::BUFFER_SIZE', data.length) + it { is_expected.to eq("65") } end - it { is_expected.to eq("98.29") } - end + context 'malicious regexp' do + let(:data) { malicious_text } + let(:regex) { malicious_regexp } - context 'using a regex capture' do - let(:data) { 'TOTAL 9926 3489 65%' } - let(:regex) { 'TOTAL\s+\d+\s+\d+\s+(\d{1,3}\%)' } + include_examples 'malicious regexp' + end - it { is_expected.to eq("65") } - end + context 'multi-line data with rooted regexp' do + let(:data) { "\n65%\n" } + let(:regex) { '^(\d+)\%$' } - context 'malicious regexp' do - let(:data) { malicious_text } - let(:regex) { malicious_regexp } + it { is_expected.to eq('65') } + end - include_examples 'malicious regexp' - end + context 'long line' do + let(:data) { 'a' * 80000 + '100%' + 'a' * 80000 } + let(:regex) { '\d+\%' } - context 'multi-line data with rooted regexp' do - let(:data) { "\n65%\n" } - let(:regex) { '^(\d+)\%$' } + it { is_expected.to eq('100') } + end - it { is_expected.to eq('65') } - end + context 'many lines' do + let(:data) { "foo\n" * 80000 + "100%\n" + "foo\n" * 80000 } + let(:regex) { '\d+\%' } - context 'long line' do - let(:data) { 'a' * 80000 + '100%' + 'a' * 80000 } - let(:regex) { '\d+\%' } + it { is_expected.to eq('100') } + end - it { is_expected.to eq('100') } - end + context 'empty regex' do + let(:data) { 'foo' } + let(:regex) { '' } - context 'many lines' do - let(:data) { "foo\n" * 80000 + "100%\n" + "foo\n" * 80000 } - let(:regex) { '\d+\%' } + it 'skips processing' do + expect(stream).not_to receive(:read) - it { is_expected.to eq('100') } - end + is_expected.to be_nil + end + end - context 'empty regex' do - let(:data) { 'foo' } - let(:regex) { '' } + context 'nil regex' do + let(:data) { 'foo' } + let(:regex) { nil } - it 'skips processing' do - expect(stream).not_to receive(:read) + it 'skips processing' do + expect(stream).not_to receive(:read) - is_expected.to be_nil + is_expected.to be_nil + end end end - context 'nil regex' do - let(:data) { 'foo' } - let(:regex) { nil } + subject { stream.extract_coverage(regex) } - it 'skips processing' do - expect(stream).not_to receive(:read) + context 'when stream is StringIO' do + let(:stream) do + described_class.new do + StringIO.new(data) + end + end + + it_behaves_like 'extract_coverages' + end - is_expected.to be_nil + context 'when stream is ChunkedIO' do + let(:stream) do + described_class.new do + Gitlab::Ci::Trace::ChunkedIO.new(job).tap do |chunked_io| + chunked_io.write(data) + chunked_io.seek(0, IO::SEEK_SET) + end + end end + + it_behaves_like 'extract_coverages' end end end -- GitLab From 8c86705206c9b53b204e7e5067e9ac5443b3e0f9 Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Fri, 6 Apr 2018 00:42:25 +0900 Subject: [PATCH 39/86] Add spec for Ci::Trace --- spec/lib/gitlab/ci/trace_spec.rb | 45 ++++++++++++++++++++++++++++++-- 1 file changed, 43 insertions(+), 2 deletions(-) diff --git a/spec/lib/gitlab/ci/trace_spec.rb b/spec/lib/gitlab/ci/trace_spec.rb index c246ce9cdf3e..8f639ef1e047 100644 --- a/spec/lib/gitlab/ci/trace_spec.rb +++ b/spec/lib/gitlab/ci/trace_spec.rb @@ -261,6 +261,15 @@ end end + shared_examples 'read successfully with 
ChunkedIO' do + it 'yields with source' do + trace.read do |stream| + expect(stream).to be_a(Gitlab::Ci::Trace::Stream) + expect(stream.stream).to be_a(Gitlab::Ci::Trace::ChunkedIO) + end + end + end + shared_examples 'failed to read' do it 'yields without source' do trace.read do |stream| @@ -302,6 +311,16 @@ it_behaves_like 'read successfully with StringIO' end + context 'when live trace exists' do + before do + Gitlab::Ci::Trace::ChunkedIO.new(build) do |stream| + stream.write('abc') + end + end + + it_behaves_like 'read successfully with ChunkedIO' + end + context 'when no sources exist' do it_behaves_like 'failed to read' end @@ -402,6 +421,28 @@ expect(trace.raw).to eq("data") end end + + context 'stored in database' do + before do + Gitlab::Ci::Trace::ChunkedIO.new(build) do |stream| + stream.write('abc') + end + end + + it "trace exist" do + expect(trace.exist?).to be(true) + end + + it "can be erased" do + trace.erase! + expect(trace.exist?).to be(false) + expect(Ci::JobTraceChunk.where(job: build)).not_to be_exist + end + + it "returns live trace data" do + expect(trace.raw).to eq("abc") + end + end end describe '#archive!' do @@ -471,7 +512,7 @@ expect(build.trace.exist?).to be_truthy expect(build.job_artifacts_trace.file.exists?).to be_truthy expect(build.job_artifacts_trace.file.filename).to eq('job.log') - expect(Gitlab::Ci::Trace::ChunkedFile::LiveTrace.exist?(build.id)).to be_falsy + expect(Ci::JobTraceChunk.where(job: build)).not_to be_exist expect(src_checksum) .to eq(Digest::SHA256.file(build.job_artifacts_trace.file.path).hexdigest) expect(build.job_artifacts_trace.file_sha256).to eq(src_checksum) @@ -485,7 +526,7 @@ build.reload expect(build.trace.exist?).to be_truthy expect(build.job_artifacts_trace).to be_nil - Gitlab::Ci::Trace::ChunkedFile::LiveTrace.new(build.id, nil, 'rb') do |stream| + Gitlab::Ci::Trace::ChunkedIO.new(build) do |stream| expect(stream.read).to eq(trace_raw) end end -- GitLab From 1a71dd049b627a396297e601aeb767f2b600e666 Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Fri, 6 Apr 2018 00:57:05 +0900 Subject: [PATCH 40/86] Fix rubocop --- app/models/ci/job_trace_chunk.rb | 12 +++++------- lib/gitlab/ci/trace/chunked_io.rb | 2 +- spec/lib/gitlab/ci/trace/stream_spec.rb | 2 ++ spec/lib/gitlab/ci/trace_spec.rb | 12 +++++++++--- 4 files changed, 17 insertions(+), 11 deletions(-) diff --git a/app/models/ci/job_trace_chunk.rb b/app/models/ci/job_trace_chunk.rb index a68b2a16efba..38374907e322 100644 --- a/app/models/ci/job_trace_chunk.rb +++ b/app/models/ci/job_trace_chunk.rb @@ -13,14 +13,13 @@ class JobTraceChunk < ActiveRecord::Base enum data_store: { redis: 1, - db: 2, + db: 2 } def data - case - when redis? + if redis? redis_data - when db? + elsif db? raw_data else raise 'Unsupported data store' @@ -30,10 +29,9 @@ def data def set_data(value) raise 'too much data' if value.bytesize > CHUNK_SIZE - case - when redis? + if redis? redis_set_data(value) - when db? + elsif db? self.raw_data = value else raise 'Unsupported data store' diff --git a/lib/gitlab/ci/trace/chunked_io.rb b/lib/gitlab/ci/trace/chunked_io.rb index 7aef2ca56eb9..da9d79dc3290 100644 --- a/lib/gitlab/ci/trace/chunked_io.rb +++ b/lib/gitlab/ci/trace/chunked_io.rb @@ -69,7 +69,7 @@ def each_line def read(length = nil, outbuf = "") out = "" - length = size - tell unless length + length ||= size - tell until length <= 0 || eof? 
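          # A sketch of the byte accounting in this loop, using illustrative
          # numbers that are not part of the patch: with CHUNK_SIZE =
          # 128.kilobytes (131_072 bytes), tell = 140_000 and length = 10_000,
          # the current chunk starts at byte 131_072, chunk_offset is 8_928,
          # so the slice below can carry at most 131_072 - 8_928 = 122_144
          # bytes and the requested 10_000 bytes are served in one iteration.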
data = chunk_slice_from_offset diff --git a/spec/lib/gitlab/ci/trace/stream_spec.rb b/spec/lib/gitlab/ci/trace/stream_spec.rb index 8ebe62ba88de..70b7d667a4d9 100644 --- a/spec/lib/gitlab/ci/trace/stream_spec.rb +++ b/spec/lib/gitlab/ci/trace/stream_spec.rb @@ -228,6 +228,8 @@ File.open(path) end end + + it_behaves_like 'sets' end context 'when stream is ChunkedIO' do diff --git a/spec/lib/gitlab/ci/trace_spec.rb b/spec/lib/gitlab/ci/trace_spec.rb index 8f639ef1e047..d16590b63a9d 100644 --- a/spec/lib/gitlab/ci/trace_spec.rb +++ b/spec/lib/gitlab/ci/trace_spec.rb @@ -540,7 +540,9 @@ before do stub_feature_flags(ci_enable_live_trace: false) - build; src_path; src_checksum; # Initialize after set feature flag + build # Initialize after set feature flag + src_path + src_checksum end it_behaves_like 'archive trace file' @@ -571,7 +573,9 @@ before do stub_feature_flags(ci_enable_live_trace: false) - build; trace_content; src_checksum; # Initialize after set feature flag + build # Initialize after set feature flag + trace_content + src_checksum build.update_column(:trace, trace_content) end @@ -625,7 +629,9 @@ before do stub_feature_flags(ci_enable_live_trace: true) - build; trace_raw; src_checksum; # Initialize after set feature flag + build # Initialize after set feature flag + trace_raw + src_checksum end it_behaves_like 'archive trace file in ChunkedIO' -- GitLab From e1d318be9446868f6b935cdc9756c3c8ad7f73ef Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Fri, 6 Apr 2018 01:03:57 +0900 Subject: [PATCH 41/86] Add changelog --- changelogs/unreleased/live-trace-v2.yml | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 changelogs/unreleased/live-trace-v2.yml diff --git a/changelogs/unreleased/live-trace-v2.yml b/changelogs/unreleased/live-trace-v2.yml new file mode 100644 index 000000000000..875a66bc5652 --- /dev/null +++ b/changelogs/unreleased/live-trace-v2.yml @@ -0,0 +1,5 @@ +--- +title: New CI Job live-trace architecture +merge_request: 18169 +author: +type: changed -- GitLab From 2c6b90f086d25e3e550b94506c2c04d69df08592 Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Fri, 6 Apr 2018 01:41:39 +0900 Subject: [PATCH 42/86] Fix HttpIO and spec --- lib/gitlab/ci/trace/http_io.rb | 22 ++++++++++++++----- .../projects/jobs_controller_spec.rb | 3 ++- spec/support/http_io/http_io_helpers.rb | 3 ++- 3 files changed, 20 insertions(+), 8 deletions(-) diff --git a/lib/gitlab/ci/trace/http_io.rb b/lib/gitlab/ci/trace/http_io.rb index ac4308f4e2cb..cff924e27ef3 100644 --- a/lib/gitlab/ci/trace/http_io.rb +++ b/lib/gitlab/ci/trace/http_io.rb @@ -75,18 +75,28 @@ def each_line end end - def read(length = nil) + def read(length = nil, outbuf = "") out = "" - until eof? || (length && out.length >= length) + length ||= size - tell + + until length <= 0 || eof? data = get_chunk break if data.empty? - out << data - @tell += data.bytesize + chunk_bytes = [BUFFER_SIZE - chunk_offset, length].min + chunk_data = data.byteslice(0, chunk_bytes) + + out << chunk_data + @tell += chunk_data.bytesize + length -= chunk_data.bytesize end - out = out[0, length] if length && out.length > length + # If outbuf is passed, we put the output into the buffer. 
This supports IO.copy_stream functionality + if outbuf + outbuf.slice!(0, outbuf.bytesize) + outbuf << out + end out end @@ -158,7 +168,7 @@ def get_chunk # Provider: GCS # - When the file size is larger than requested Content-range, the Content-range is included in responces with Net::HTTPPartialContent 206 # - When the file size is smaller than requested Content-range, the Content-range is included in responces with Net::HTTPOK 200 - @chunk_range ||= (chunk_start...(chunk_start + @chunk.length)) + @chunk_range ||= (chunk_start...(chunk_start + @chunk.bytesize)) end @chunk[chunk_offset..BUFFER_SIZE] diff --git a/spec/controllers/projects/jobs_controller_spec.rb b/spec/controllers/projects/jobs_controller_spec.rb index 31046c202e65..ba73f0251695 100644 --- a/spec/controllers/projects/jobs_controller_spec.rb +++ b/spec/controllers/projects/jobs_controller_spec.rb @@ -1,7 +1,7 @@ # coding: utf-8 require 'spec_helper' -describe Projects::JobsController do +describe Projects::JobsController, :clean_gitlab_redis_shared_state do include ApiHelpers include HttpIOHelpers @@ -10,6 +10,7 @@ let(:user) { create(:user) } before do + stub_feature_flags(ci_enable_live_trace: true) stub_not_protect_default_branch end diff --git a/spec/support/http_io/http_io_helpers.rb b/spec/support/http_io/http_io_helpers.rb index 31e07e720cdf..2c68c2cd9a6f 100644 --- a/spec/support/http_io/http_io_helpers.rb +++ b/spec/support/http_io/http_io_helpers.rb @@ -44,10 +44,11 @@ def range_trace_body(from, to) def remote_trace_body @remote_trace_body ||= File.read(expand_fixture_path('trace/sample_trace')) + .force_encoding(Encoding::BINARY) end def remote_trace_size - remote_trace_body.length + remote_trace_body.bytesize end def set_smaller_buffer_size_than(file_size) -- GitLab From 16cb6d63e86bbd0dc4ba291db2ae32d6fd955195 Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Fri, 6 Apr 2018 01:53:15 +0900 Subject: [PATCH 43/86] Fix retry_build_service_spec.rb --- spec/services/ci/retry_build_service_spec.rb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/spec/services/ci/retry_build_service_spec.rb b/spec/services/ci/retry_build_service_spec.rb index 8de0bdf92e28..955894444eb4 100644 --- a/spec/services/ci/retry_build_service_spec.rb +++ b/spec/services/ci/retry_build_service_spec.rb @@ -30,7 +30,7 @@ runner_id tag_taggings taggings tags trigger_request_id user_id auto_canceled_by_id retried failure_reason artifacts_file_store artifacts_metadata_store - metadata].freeze + metadata chunks].freeze shared_examples 'build duplication' do let(:another_pipeline) { create(:ci_empty_pipeline, project: project) } -- GitLab From 3b97710c176f99eed00e05522447400a127a116d Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Fri, 6 Apr 2018 02:23:26 +0900 Subject: [PATCH 44/86] Added clean_gitlab_redis_shared_state to features/projects/jobs_spec.rb --- spec/features/projects/jobs_spec.rb | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/spec/features/projects/jobs_spec.rb b/spec/features/projects/jobs_spec.rb index 5d311f2dde33..95e074be2fe8 100644 --- a/spec/features/projects/jobs_spec.rb +++ b/spec/features/projects/jobs_spec.rb @@ -1,7 +1,7 @@ require 'spec_helper' require 'tempfile' -feature 'Jobs' do +feature 'Jobs', :clean_gitlab_redis_shared_state do let(:user) { create(:user) } let(:user_access_level) { :developer } let(:project) { create(:project, :repository) } @@ -15,6 +15,8 @@ end before do + stub_feature_flags(ci_enable_live_trace: true) + project.add_role(user, user_access_level) sign_in(user) 
end -- GitLab From 7297a06cb6d9a73c721b27fdc1941e04b17f5433 Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Fri, 6 Apr 2018 04:41:45 +0900 Subject: [PATCH 45/86] Fix bunch of texts --- .flayignore | 1 + app/controllers/projects/jobs_controller.rb | 2 +- lib/gitlab/ci/trace/chunked_io.rb | 2 -- lib/gitlab/ci/trace/stream.rb | 6 +++- spec/features/projects/jobs_spec.rb | 40 --------------------- spec/lib/gitlab/ci/trace/chunked_io_spec.rb | 8 ----- spec/requests/api/runner_spec.rb | 3 +- 7 files changed, 9 insertions(+), 53 deletions(-) diff --git a/.flayignore b/.flayignore index 3d69bb2c985b..0c4eee10ffab 100644 --- a/.flayignore +++ b/.flayignore @@ -9,3 +9,4 @@ lib/gitlab/gitaly_client/operation_service.rb lib/gitlab/background_migration/* app/models/project_services/kubernetes_service.rb lib/gitlab/workhorse.rb +lib/gitlab/ci/trace/chunked_io.rb diff --git a/app/controllers/projects/jobs_controller.rb b/app/controllers/projects/jobs_controller.rb index 85e972d9731c..7213e185ee67 100644 --- a/app/controllers/projects/jobs_controller.rb +++ b/app/controllers/projects/jobs_controller.rb @@ -128,7 +128,7 @@ def raw if stream.file? send_file stream.path, type: 'text/plain; charset=utf-8', disposition: 'inline' else - render_404 + send_data stream.raw, type: 'text/plain; charset=utf-8', disposition: 'inline' end end end diff --git a/lib/gitlab/ci/trace/chunked_io.rb b/lib/gitlab/ci/trace/chunked_io.rb index da9d79dc3290..d768c0119686 100644 --- a/lib/gitlab/ci/trace/chunked_io.rb +++ b/lib/gitlab/ci/trace/chunked_io.rb @@ -113,8 +113,6 @@ def readline end def write(data) - raise 'Could not write empty data' unless data.present? - start_pos = tell data = data.force_encoding(Encoding::BINARY) diff --git a/lib/gitlab/ci/trace/stream.rb b/lib/gitlab/ci/trace/stream.rb index 6cd791df42b3..8519dab82d24 100644 --- a/lib/gitlab/ci/trace/stream.rb +++ b/lib/gitlab/ci/trace/stream.rb @@ -8,7 +8,7 @@ class Stream attr_reader :stream - delegate :close, :tell, :seek, :size, :path, :url, :truncate, to: :stream, allow_nil: true + delegate :close, :tell, :seek, :size, :url, :truncate, to: :stream, allow_nil: true delegate :valid?, to: :stream, as: :present?, allow_nil: true @@ -25,6 +25,10 @@ def file? self.path.present? end + def file? + self.path if self.stream.respond_to?(:path) + end + def limit(last_bytes = LIMIT_SIZE) if last_bytes < size stream.seek(-last_bytes, IO::SEEK_END) diff --git a/spec/features/projects/jobs_spec.rb b/spec/features/projects/jobs_spec.rb index 95e074be2fe8..b14b7103175f 100644 --- a/spec/features/projects/jobs_spec.rb +++ b/spec/features/projects/jobs_spec.rb @@ -15,8 +15,6 @@ end before do - stub_feature_flags(ci_enable_live_trace: true) - project.add_role(user, user_access_level) sign_in(user) end @@ -537,44 +535,6 @@ end end - context 'storage form' do - let(:existing_file) { Tempfile.new('existing-trace-file').path } - - before do - job.run! 
- end - - context 'when job has trace in file', :js do - before do - allow_any_instance_of(Gitlab::Ci::Trace) - .to receive(:paths) - .and_return([existing_file]) - end - - it 'sends the right headers' do - requests = inspect_requests(inject_headers: { 'X-Sendfile-Type' => 'X-Sendfile' }) do - visit raw_project_job_path(project, job) - end - expect(requests.first.response_headers['Content-Type']).to eq('text/plain; charset=utf-8') - expect(requests.first.response_headers['X-Sendfile']).to eq(existing_file) - end - end - - context 'when job has trace in the database', :js do - before do - allow_any_instance_of(Gitlab::Ci::Trace) - .to receive(:paths) - .and_return([]) - - visit project_job_path(project, job) - end - - it 'sends the right headers' do - expect(page).not_to have_selector('.js-raw-link-controller') - end - end - end - context "when visiting old URL" do let(:raw_job_url) do raw_project_job_path(project, job) diff --git a/spec/lib/gitlab/ci/trace/chunked_io_spec.rb b/spec/lib/gitlab/ci/trace/chunked_io_spec.rb index 97781faecd30..72211e82eab2 100644 --- a/spec/lib/gitlab/ci/trace/chunked_io_spec.rb +++ b/spec/lib/gitlab/ci/trace/chunked_io_spec.rb @@ -298,14 +298,6 @@ it_behaves_like 'writes a trace' end - - context 'when data is nil' do - let(:data) { nil } - - it 'writes a trace' do - expect { subject } .to raise_error('Could not write empty data') - end - end end context 'when data already exists' do diff --git a/spec/requests/api/runner_spec.rb b/spec/requests/api/runner_spec.rb index 4f3420cc0ad2..284e89318123 100644 --- a/spec/requests/api/runner_spec.rb +++ b/spec/requests/api/runner_spec.rb @@ -1,11 +1,12 @@ require 'spec_helper' -describe API::Runner do +describe API::Runner, :clean_gitlab_redis_shared_state do include StubGitlabCalls let(:registration_token) { 'abcdefg123456' } before do + stub_feature_flags(ci_enable_live_trace: true) stub_gitlab_calls stub_application_setting(runners_registration_token: registration_token) allow_any_instance_of(Ci::Runner).to receive(:cache_attributes) -- GitLab From 4c6cb3cf0665ff8fe558451907b948a57f07d390 Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Fri, 6 Apr 2018 15:07:55 +0900 Subject: [PATCH 46/86] Fix Stream#file? duplicates. And the spec --- lib/gitlab/ci/trace/stream.rb | 4 ---- spec/lib/gitlab/ci/trace/stream_spec.rb | 1 - 2 files changed, 5 deletions(-) diff --git a/lib/gitlab/ci/trace/stream.rb b/lib/gitlab/ci/trace/stream.rb index 68919e317a1f..bcdd4225ce7b 100644 --- a/lib/gitlab/ci/trace/stream.rb +++ b/lib/gitlab/ci/trace/stream.rb @@ -25,10 +25,6 @@ def file? self.path.present? end - def file? - self.path - end - def path self.stream.path if self.stream.respond_to?(:path) end diff --git a/spec/lib/gitlab/ci/trace/stream_spec.rb b/spec/lib/gitlab/ci/trace/stream_spec.rb index 70b7d667a4d9..e940a075cfef 100644 --- a/spec/lib/gitlab/ci/trace/stream_spec.rb +++ b/spec/lib/gitlab/ci/trace/stream_spec.rb @@ -17,7 +17,6 @@ it { is_expected.to delegate_method(:path).to(:stream) } it { is_expected.to delegate_method(:truncate).to(:stream) } it { is_expected.to delegate_method(:valid?).to(:stream).as(:present?) } - it { is_expected.to delegate_method(:file?).to(:path).as(:present?) 
} end describe '#limit' do -- GitLab From eaf29ccec0b8075d87f8444839c383d8ef66dfd7 Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Fri, 6 Apr 2018 15:14:52 +0900 Subject: [PATCH 47/86] Use MEDIUMTEXT(16MB) type when Mysql is used --- db/migrate/20180326202229_create_ci_job_trace_chunks.rb | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/db/migrate/20180326202229_create_ci_job_trace_chunks.rb b/db/migrate/20180326202229_create_ci_job_trace_chunks.rb index abfaea9f54a1..e9f43eaf67f3 100644 --- a/db/migrate/20180326202229_create_ci_job_trace_chunks.rb +++ b/db/migrate/20180326202229_create_ci_job_trace_chunks.rb @@ -8,7 +8,9 @@ def change t.integer :job_id, null: false t.integer :chunk_index, null: false t.integer :data_store, null: false - t.text :raw_data + # Mysql needs MEDIUMTEXT type (up to 16MB) rather than TEXT (up to 64KB) + # Because 'raw_data' is always capped by Ci::JobTraceChunk::CHUNK_SIZE, which is 128KB + t.text :raw_data, limit: 16.megabytes - 1 t.foreign_key :ci_builds, column: :job_id, on_delete: :cascade t.index [:job_id, :chunk_index], unique: true -- GitLab From aaff5e452ecfdcab3b76873da37b864109703b18 Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Fri, 6 Apr 2018 16:08:23 +0900 Subject: [PATCH 48/86] Add spec that proves trace can be recovered even if it had redis outage --- lib/api/helpers/runner.rb | 1 + spec/requests/api/runner_spec.rb | 24 +++++++++++++++++++ spec/support/chunked_io/chunked_io_helpers.rb | 5 ++++ 3 files changed, 30 insertions(+) diff --git a/lib/api/helpers/runner.rb b/lib/api/helpers/runner.rb index 35ac0b4cbcaf..382577cc931a 100644 --- a/lib/api/helpers/runner.rb +++ b/lib/api/helpers/runner.rb @@ -52,6 +52,7 @@ def authenticate_job! end def job_token_valid?(job) + # binding.pry token = (params[JOB_TOKEN_PARAM] || env[JOB_TOKEN_HEADER]).to_s token && job.valid_token?(token) end diff --git a/spec/requests/api/runner_spec.rb b/spec/requests/api/runner_spec.rb index 847579807aed..d130b58fb1cf 100644 --- a/spec/requests/api/runner_spec.rb +++ b/spec/requests/api/runner_spec.rb @@ -2,6 +2,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do include StubGitlabCalls + include ChunkedIOHelpers let(:registration_token) { 'abcdefg123456' } @@ -865,6 +866,29 @@ def update_job(token = job.token, **params) expect(response.status).to eq(403) end end + + context 'when redis had an outage' do + it "recovers" do + # GitLab-Runner patchs + patch_the_trace + expect(job.reload.trace.raw).to eq 'BUILD TRACE appended appended' + + # GitLab-Rails enxounters an outage on Redis + redis_shared_state_outage! 
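          # What the helper above simulates: the live chunks for this job
          # lived only in Redis, so once shared state is flushed (assuming
          # nothing was swapped to the database yet) the trace reads back
          # empty, and the Range header below tells the runner to resend the
          # whole trace from offset 0.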
+ expect(job.reload.trace.raw).to eq '' + + # GitLab-Runner patchs + patch_the_trace('hello', headers.merge({ 'Content-Range' => "28-32" })) + expect(response.status).to eq 202 + expect(response.header).to have_key 'Range' + expect(response.header['Range']).to eq '0-0' + expect(job.reload.trace.raw).to eq '' + + # GitLab-Runner re-patchs + patch_the_trace('BUILD TRACE appended appended hello') + expect(job.reload.trace.raw).to eq 'BUILD TRACE appended appended hello' + end + end end context 'when Runner makes a force-patch' do diff --git a/spec/support/chunked_io/chunked_io_helpers.rb b/spec/support/chunked_io/chunked_io_helpers.rb index a95eb87d7b87..57391e6d42aa 100644 --- a/spec/support/chunked_io/chunked_io_helpers.rb +++ b/spec/support/chunked_io/chunked_io_helpers.rb @@ -8,4 +8,9 @@ def stub_buffer_size(size) stub_const('Ci::JobTraceChunk::CHUNK_SIZE', size) stub_const('Gitlab::Ci::Trace::ChunkedIO::CHUNK_SIZE', size) end + + def redis_shared_state_outage! + Gitlab::Redis::SharedState.with(&:flushall) + Sidekiq.redis(&:flushall) + end end -- GitLab From cffee49f7ffca39cb0e522dacc9b777e45d22680 Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Fri, 6 Apr 2018 19:30:23 +0900 Subject: [PATCH 49/86] Change Redis TTL to 1day. Fixing nitpicks --- app/models/ci/job_trace_chunk.rb | 10 +++++----- lib/api/helpers/runner.rb | 1 - lib/gitlab/ci/trace/chunked_io.rb | 10 +++++++--- spec/lib/gitlab/ci/trace/chunked_io_spec.rb | 4 +--- spec/lib/gitlab/ci/trace/stream_spec.rb | 2 +- 5 files changed, 14 insertions(+), 13 deletions(-) diff --git a/app/models/ci/job_trace_chunk.rb b/app/models/ci/job_trace_chunk.rb index 38374907e322..bec4405dbd61 100644 --- a/app/models/ci/job_trace_chunk.rb +++ b/app/models/ci/job_trace_chunk.rb @@ -9,7 +9,7 @@ class JobTraceChunk < ActiveRecord::Base default_value_for :data_store, :redis CHUNK_SIZE = 128.kilobytes - CHUNK_REDIS_TTL = 1.month + CHUNK_REDIS_TTL = 1.day enum data_store: { redis: 1, @@ -27,7 +27,7 @@ def data end def set_data(value) - raise 'too much data' if value.bytesize > CHUNK_SIZE + raise ArgumentError, 'too much data' if value.bytesize > CHUNK_SIZE if redis? redis_set_data(value) @@ -46,9 +46,9 @@ def truncate(offset = 0) end def append(new_data, offset) - current_data = self.data || "" - raise 'Offset is out of bound' if offset > current_data.bytesize || offset < 0 - raise 'Outside of chunk size' if CHUNK_SIZE < offset + new_data.bytesize + current_data = self.data.to_s + raise ArgumentError, 'Offset is out of bound' if offset > current_data.bytesize || offset < 0 + raise ArgumentError, 'Outside of chunk size' if CHUNK_SIZE < offset + new_data.bytesize self.set_data(current_data.byteslice(0, offset) + new_data) end diff --git a/lib/api/helpers/runner.rb b/lib/api/helpers/runner.rb index 382577cc931a..35ac0b4cbcaf 100644 --- a/lib/api/helpers/runner.rb +++ b/lib/api/helpers/runner.rb @@ -52,7 +52,6 @@ def authenticate_job! end def job_token_valid?(job) - # binding.pry token = (params[JOB_TOKEN_PARAM] || env[JOB_TOKEN_HEADER]).to_s token && job.valid_token?(token) end diff --git a/lib/gitlab/ci/trace/chunked_io.rb b/lib/gitlab/ci/trace/chunked_io.rb index d768c0119686..8dbe892df646 100644 --- a/lib/gitlab/ci/trace/chunked_io.rb +++ b/lib/gitlab/ci/trace/chunked_io.rb @@ -19,7 +19,7 @@ def initialize(job, &block) @job = job @chunks_cache = [] @tell = 0 - @size = job_chunks.last.try(&:end_offset).to_i + @size = calculate_size yield self if block_given? 
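        # The block form mirrors File.open; a usage sketch, matching how the
        # specs elsewhere in this series drive it:
        #
        #   Gitlab::Ci::Trace::ChunkedIO.new(job) do |stream|
        #     stream.write('abc')
        #   end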
end @@ -48,7 +48,7 @@ def seek(pos, where = IO::SEEK_SET) -1 end - raise 'new position is outside of file' if new_pos < 0 || new_pos > size + raise ArgumentError, 'new position is outside of file' if new_pos < 0 || new_pos > size @tell = new_pos end @@ -135,7 +135,7 @@ def write(data) end def truncate(offset) - raise 'Outside of file' if offset > size + raise ArgumentError, 'Outside of file' if offset > size @tell = offset @size = offset @@ -221,6 +221,10 @@ def ensure_chunk def job_chunks ::Ci::JobTraceChunk.where(job: job) end + + def calculate_size + job_chunks.order(chunk_index: :desc).last.try(&:end_offset).to_i + end end end end diff --git a/spec/lib/gitlab/ci/trace/chunked_io_spec.rb b/spec/lib/gitlab/ci/trace/chunked_io_spec.rb index 72211e82eab2..bcef21d5f71f 100644 --- a/spec/lib/gitlab/ci/trace/chunked_io_spec.rb +++ b/spec/lib/gitlab/ci/trace/chunked_io_spec.rb @@ -334,14 +334,12 @@ end context "#truncate" do - subject { chunked_io.truncate(offset) } - let(:offset) { 10 } context 'when data does not exist' do shared_examples 'truncates a trace' do it do - subject + chunked_io.truncate(offset) chunked_io.seek(0, IO::SEEK_SET) expect(chunked_io.read).to eq(sample_trace_raw.byteslice(0, offset)) diff --git a/spec/lib/gitlab/ci/trace/stream_spec.rb b/spec/lib/gitlab/ci/trace/stream_spec.rb index e940a075cfef..7b6f5bf83ff9 100644 --- a/spec/lib/gitlab/ci/trace/stream_spec.rb +++ b/spec/lib/gitlab/ci/trace/stream_spec.rb @@ -116,7 +116,7 @@ end end - context 'when stream is StringIO' do + context 'when stream is Tempfile' do let(:tempfile) { Tempfile.new } let(:stream) do -- GitLab From 8a9955cf5cca7ae09d3479d2386eb5a856d17601 Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Fri, 6 Apr 2018 20:41:56 +0900 Subject: [PATCH 50/86] Fix wrong sql at calculation size --- lib/gitlab/ci/trace/chunked_io.rb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/gitlab/ci/trace/chunked_io.rb b/lib/gitlab/ci/trace/chunked_io.rb index 8dbe892df646..2caebe3c95e6 100644 --- a/lib/gitlab/ci/trace/chunked_io.rb +++ b/lib/gitlab/ci/trace/chunked_io.rb @@ -223,7 +223,7 @@ def job_chunks end def calculate_size - job_chunks.order(chunk_index: :desc).last.try(&:end_offset).to_i + job_chunks.order(chunk_index: :desc).first.try(&:end_offset).to_i end end end -- GitLab From a79cbbd809e1de014ad76273a03f48382e9e298e Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Fri, 6 Apr 2018 21:02:11 +0900 Subject: [PATCH 51/86] Add rake task and timestamped migration file for mysql raw_data MIDIUMTEXT --- ...limits_ci_job_trace_chunks_raw_data_for_mysql.rb | 13 +++++++++++++ ...limits_ci_job_trace_chunks_raw_data_for_mysql.rb | 7 +++++++ db/schema.rb | 2 +- lib/tasks/migrate/add_limits_mysql.rake | 2 ++ 4 files changed, 23 insertions(+), 1 deletion(-) create mode 100644 db/migrate/20180406204716_add_limits_ci_job_trace_chunks_raw_data_for_mysql.rb create mode 100644 db/migrate/limits_ci_job_trace_chunks_raw_data_for_mysql.rb diff --git a/db/migrate/20180406204716_add_limits_ci_job_trace_chunks_raw_data_for_mysql.rb b/db/migrate/20180406204716_add_limits_ci_job_trace_chunks_raw_data_for_mysql.rb new file mode 100644 index 000000000000..e7343db7da0d --- /dev/null +++ b/db/migrate/20180406204716_add_limits_ci_job_trace_chunks_raw_data_for_mysql.rb @@ -0,0 +1,13 @@ +# See http://doc.gitlab.com/ce/development/migration_style_guide.html +# for more information on how to write migrations for GitLab. 
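# (The file required below defines a plain migration class shared with the
# add_limits_mysql rake task added later in this commit, so the MEDIUMTEXT
# change reaches both fresh migrations and already-migrated MySQL
# installations.)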
+require Rails.root.join('db/migrate/limits_ci_job_trace_chunks_raw_data_for_mysql') + +class AddLimitsCiJobTraceChunksRawDataForMysql < ActiveRecord::Migration + include Gitlab::Database::MigrationHelpers + + DOWNTIME = false + + def up + LimitsCiJobTraceChunksRawDataForMysql.new.up + end +end diff --git a/db/migrate/limits_ci_job_trace_chunks_raw_data_for_mysql.rb b/db/migrate/limits_ci_job_trace_chunks_raw_data_for_mysql.rb new file mode 100644 index 000000000000..64088efa70bf --- /dev/null +++ b/db/migrate/limits_ci_job_trace_chunks_raw_data_for_mysql.rb @@ -0,0 +1,7 @@ +class LimitsCiJobTraceChunksRawDataForMysql < ActiveRecord::Migration + def up + return unless Gitlab::Database.mysql? + + change_column :ci_job_trace_chunks, :raw_data, :text, limit: 16.megabytes - 1 #MEDIUMTEXT + end +end diff --git a/db/schema.rb b/db/schema.rb index 931941da81c9..69d7997b4bfd 100644 --- a/db/schema.rb +++ b/db/schema.rb @@ -11,7 +11,7 @@ # # It's strongly recommended that you check this file into your version control system. -ActiveRecord::Schema.define(version: 20180405101928) do +ActiveRecord::Schema.define(version: 20180406204716) do # These are extensions that must be enabled in order to support this database enable_extension "plpgsql" diff --git a/lib/tasks/migrate/add_limits_mysql.rake b/lib/tasks/migrate/add_limits_mysql.rake index 151f42a22229..3cdcdcdf8748 100644 --- a/lib/tasks/migrate/add_limits_mysql.rake +++ b/lib/tasks/migrate/add_limits_mysql.rake @@ -1,6 +1,7 @@ require Rails.root.join('db/migrate/limits_to_mysql') require Rails.root.join('db/migrate/markdown_cache_limits_to_mysql') require Rails.root.join('db/migrate/merge_request_diff_file_limits_to_mysql') +require Rails.root.join('db/migrate/limits_ci_job_trace_chunks_raw_data_for_mysql') desc "GitLab | Add limits to strings in mysql database" task add_limits_mysql: :environment do @@ -8,4 +9,5 @@ task add_limits_mysql: :environment do LimitsToMysql.new.up MarkdownCacheLimitsToMysql.new.up MergeRequestDiffFileLimitsToMysql.new.up + LimitsCiJobTraceChunksRawDataForMysql.new.up end -- GitLab From 47b01f592134d7a4e55d6832633436e2d098d4cc Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Fri, 6 Apr 2018 21:29:39 +0900 Subject: [PATCH 52/86] Add test for after_destroy :redis_delete_data hook --- spec/models/ci/job_trace_chunk_spec.rb | 61 ++++++++++++++++++++++++++ 1 file changed, 61 insertions(+) diff --git a/spec/models/ci/job_trace_chunk_spec.rb b/spec/models/ci/job_trace_chunk_spec.rb index cb993885fa11..0daf5ca25c3e 100644 --- a/spec/models/ci/job_trace_chunk_spec.rb +++ b/spec/models/ci/job_trace_chunk_spec.rb @@ -316,4 +316,65 @@ end end end + + describe 'deletes data in redis after chunk record destroyed' do + let(:project) { create(:project) } + + before do + pipeline = create(:ci_pipeline, project: project) + create(:ci_build, :running, :trace_live, pipeline: pipeline, project: project) + create(:ci_build, :running, :trace_live, pipeline: pipeline, project: project) + create(:ci_build, :running, :trace_live, pipeline: pipeline, project: project) + end + + shared_examples_for 'deletes all job_trace_chunk and data in redis' do + it do + project.builds.each do |build| + Gitlab::Redis::SharedState.with do |redis| + redis.scan_each(match: "gitlab:ci:trace:#{build.id}:chunks:?") do |key| + expect(redis.exists(key)).to be_truthy + end + end + end + + expect(described_class.count).not_to eq(0) + + subject + + expect(described_class.count).to eq(0) + + project.builds.each do |build| + Gitlab::Redis::SharedState.with do |redis| + 
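              # Redis glob patterns treat ? as exactly one character, so this
              # scan only matches single-digit chunk indexes; presumably
              # enough for the traces these fixtures write.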
redis.scan_each(match: "gitlab:ci:trace:#{build.id}:chunks:?") do |key| + expect(redis.exists(key)).to be_falsey + end + end + end + end + end + + context 'when job_trace_chunk is destroyed' do + let(:subject) do + project.builds.each { |build| build.chunks.destroy_all } + end + + it_behaves_like 'deletes all job_trace_chunk and data in redis' + end + + context 'when job is destroyed' do + let(:subject) do + project.builds.destroy_all + end + + it_behaves_like 'deletes all job_trace_chunk and data in redis' + end + + context 'when project is destroyed' do + let(:subject) do + project.destroy! + end + + it_behaves_like 'deletes all job_trace_chunk and data in redis' + end + end end -- GitLab From 180267d6869ce94922b929893729441772da95a9 Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Fri, 6 Apr 2018 22:43:12 +0900 Subject: [PATCH 53/86] Change Redis TTL to 1 week --- app/models/ci/job_trace_chunk.rb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app/models/ci/job_trace_chunk.rb b/app/models/ci/job_trace_chunk.rb index bec4405dbd61..f45077f5f4f9 100644 --- a/app/models/ci/job_trace_chunk.rb +++ b/app/models/ci/job_trace_chunk.rb @@ -9,7 +9,7 @@ class JobTraceChunk < ActiveRecord::Base default_value_for :data_store, :redis CHUNK_SIZE = 128.kilobytes - CHUNK_REDIS_TTL = 1.day + CHUNK_REDIS_TTL = 1.week enum data_store: { redis: 1, -- GitLab From 76485cbf8ba555c929fd2f54ca2051a382760f20 Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Sat, 7 Apr 2018 00:08:35 +0900 Subject: [PATCH 54/86] Add ExclusiveLock in Ci::JobTraceChunk --- app/models/ci/job_trace_chunk.rb | 72 +++++++++++++++++++------- spec/models/ci/job_trace_chunk_spec.rb | 11 ++++ 2 files changed, 63 insertions(+), 20 deletions(-) diff --git a/app/models/ci/job_trace_chunk.rb b/app/models/ci/job_trace_chunk.rb index f45077f5f4f9..383c6596a517 100644 --- a/app/models/ci/job_trace_chunk.rb +++ b/app/models/ci/job_trace_chunk.rb @@ -8,8 +8,13 @@ class JobTraceChunk < ActiveRecord::Base default_value_for :data_store, :redis + WriteError = Class.new(StandardError) + CHUNK_SIZE = 128.kilobytes CHUNK_REDIS_TTL = 1.week + LOCK_RETRY = 100 + LOCK_SLEEP = 1 + LOCK_TTL = 5.minutes enum data_store: { redis: 1, @@ -27,18 +32,20 @@ def data end def set_data(value) - raise ArgumentError, 'too much data' if value.bytesize > CHUNK_SIZE - - if redis? - redis_set_data(value) - elsif db? - self.raw_data = value - else - raise 'Unsupported data store' + in_lock do + raise ArgumentError, 'too much data' if value.bytesize > CHUNK_SIZE + + if redis? + redis_set_data(value) + elsif db? + self.raw_data = value + else + raise 'Unsupported data store' + end + + save! if changed? + schedule_to_db if fullfilled? end - - save! if changed? - schedule_to_db if fullfilled? end def truncate(offset = 0) @@ -70,11 +77,13 @@ def range end def use_database! - return if db? - return unless size > 0 + in_lock do + return if db? + return unless size > 0 - self.update!(raw_data: data, data_store: :db) - redis_delete_data + self.update!(raw_data: data, data_store: :db) + redis_delete_data + end end private @@ -91,24 +100,47 @@ def fullfilled? 
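      # A worst-case wait sketch based on the constants introduced above (not
      # new behaviour): in_lock below retries try_obtain up to LOCK_RETRY
      # (100) times with LOCK_SLEEP (1 second) between attempts, so a writer
      # blocked by another process's lease waits roughly 100 seconds before
      # WriteError is raised.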
def redis_data Gitlab::Redis::SharedState.with do |redis| - redis.get(redis_key) + redis.get(redis_data_key) end end def redis_set_data(data) Gitlab::Redis::SharedState.with do |redis| - redis.set(redis_key, data, ex: CHUNK_REDIS_TTL) + redis.set(redis_data_key, data, ex: CHUNK_REDIS_TTL) end end def redis_delete_data Gitlab::Redis::SharedState.with do |redis| - redis.del(redis_key) + redis.del(redis_data_key) end end - def redis_key - "gitlab:ci:trace:#{job_id}:chunks:#{chunk_index}" + def redis_data_key + "gitlab:ci:trace:#{job_id}:chunks:#{chunk_index}:data" + end + + def redis_lock_key + "gitlab:ci:trace:#{job_id}:chunks:#{chunk_index}:lock" + end + + def in_lock + lease = Gitlab::ExclusiveLease.new(redis_lock_key, timeout: LOCK_TTL) + retry_count = 0 + + until uuid = lease.try_obtain + # Keep trying until we obtain the lease. To prevent hammering Redis too + # much we'll wait for a bit between retries. + sleep(LOCK_SLEEP) + break if LOCK_RETRY < (retry_count += 1) + end + + raise WriteError, 'Failed to obtain write lock' unless uuid + + self.reload if self.persisted? + return yield + ensure + Gitlab::ExclusiveLease.cancel(redis_lock_key, uuid) end end end diff --git a/spec/models/ci/job_trace_chunk_spec.rb b/spec/models/ci/job_trace_chunk_spec.rb index 0daf5ca25c3e..eb240de188f9 100644 --- a/spec/models/ci/job_trace_chunk_spec.rb +++ b/spec/models/ci/job_trace_chunk_spec.rb @@ -317,6 +317,17 @@ end end + describe 'ExclusiveLock' do + before do + allow_any_instance_of(Gitlab::ExclusiveLease).to receive(:try_obtain) { nil } + stub_const('Ci::JobTraceChunk::LOCK_RETRY', 1) + end + + it 'raise an error' do + expect { job_trace_chunk.append('ABC', 0) }.to raise_error('Failed to obtain write lock') + end + end + describe 'deletes data in redis after chunk record destroyed' do let(:project) { create(:project) } -- GitLab From d5740846f6622995302036ac822b758da60f2ba3 Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Tue, 17 Apr 2018 14:31:53 +0900 Subject: [PATCH 55/86] Sanitize migration file. Added a comment on LimitsCiJobTraceChunksRawDataForMysql --- db/migrate/20180326202229_create_ci_job_trace_chunks.rb | 4 +--- db/migrate/limits_ci_job_trace_chunks_raw_data_for_mysql.rb | 2 ++ 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/db/migrate/20180326202229_create_ci_job_trace_chunks.rb b/db/migrate/20180326202229_create_ci_job_trace_chunks.rb index e9f43eaf67f3..abfaea9f54a1 100644 --- a/db/migrate/20180326202229_create_ci_job_trace_chunks.rb +++ b/db/migrate/20180326202229_create_ci_job_trace_chunks.rb @@ -8,9 +8,7 @@ def change t.integer :job_id, null: false t.integer :chunk_index, null: false t.integer :data_store, null: false - # Mysql needs MEDIUMTEXT type (up to 16MB) rather than TEXT (up to 64KB) - # Because 'raw_data' is always capped by Ci::JobTraceChunk::CHUNK_SIZE, which is 128KB - t.text :raw_data, limit: 16.megabytes - 1 + t.text :raw_data t.foreign_key :ci_builds, column: :job_id, on_delete: :cascade t.index [:job_id, :chunk_index], unique: true diff --git a/db/migrate/limits_ci_job_trace_chunks_raw_data_for_mysql.rb b/db/migrate/limits_ci_job_trace_chunks_raw_data_for_mysql.rb index 64088efa70bf..5e307ce73c90 100644 --- a/db/migrate/limits_ci_job_trace_chunks_raw_data_for_mysql.rb +++ b/db/migrate/limits_ci_job_trace_chunks_raw_data_for_mysql.rb @@ -2,6 +2,8 @@ class LimitsCiJobTraceChunksRawDataForMysql < ActiveRecord::Migration def up return unless Gitlab::Database.mysql? 
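    # (PostgreSQL's text type is effectively unbounded, which is why the
    # guard above makes this a MySQL-only change.)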
+ # Mysql needs MEDIUMTEXT type (up to 16MB) rather than TEXT (up to 64KB) + # Because 'raw_data' is always capped by Ci::JobTraceChunk::CHUNK_SIZE, which is 128KB change_column :ci_job_trace_chunks, :raw_data, :text, limit: 16.megabytes - 1 #MEDIUMTEXT end end -- GitLab From 731118d349c53a712c7afa67adc2b457895af048 Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Wed, 18 Apr 2018 14:12:43 +0900 Subject: [PATCH 56/86] Put out schedule_to_db from inclock to avoid deadlock --- app/models/ci/job_trace_chunk.rb | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/app/models/ci/job_trace_chunk.rb b/app/models/ci/job_trace_chunk.rb index 383c6596a517..5d8727e2b11a 100644 --- a/app/models/ci/job_trace_chunk.rb +++ b/app/models/ci/job_trace_chunk.rb @@ -32,9 +32,9 @@ def data end def set_data(value) - in_lock do - raise ArgumentError, 'too much data' if value.bytesize > CHUNK_SIZE + raise ArgumentError, 'too much data' if value.bytesize > CHUNK_SIZE + in_lock do if redis? redis_set_data(value) elsif db? @@ -44,8 +44,9 @@ def set_data(value) end save! if changed? - schedule_to_db if fullfilled? end + + schedule_to_db if fullfilled? end def truncate(offset = 0) -- GitLab From 1e817e0018af2c3fbb622ec74f02ae255e7be95f Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Wed, 18 Apr 2018 15:19:53 +0900 Subject: [PATCH 57/86] Align force_encoding strategy into Trace::Stream --- app/models/ci/job_trace_chunk.rb | 2 +- lib/gitlab/ci/trace.rb | 4 ++-- lib/gitlab/ci/trace/chunked_io.rb | 3 +-- lib/gitlab/ci/trace/stream.rb | 8 ++++++-- 4 files changed, 10 insertions(+), 7 deletions(-) diff --git a/app/models/ci/job_trace_chunk.rb b/app/models/ci/job_trace_chunk.rb index 5d8727e2b11a..371417e7ff57 100644 --- a/app/models/ci/job_trace_chunk.rb +++ b/app/models/ci/job_trace_chunk.rb @@ -28,7 +28,7 @@ def data raw_data else raise 'Unsupported data store' - end&.force_encoding(Encoding::BINARY) + end&.force_encoding(Encoding::BINARY) # Redis/Database return UTF-8 string as default end def set_data(value) diff --git a/lib/gitlab/ci/trace.rb b/lib/gitlab/ci/trace.rb index 6554c924e5c3..e4f924d2fe4b 100644 --- a/lib/gitlab/ci/trace.rb +++ b/lib/gitlab/ci/trace.rb @@ -43,7 +43,7 @@ def set(data) end def append(data, offset) - write do |stream| + write('a+b') do |stream| current_length = stream.size return -current_length unless current_length == offset @@ -75,7 +75,7 @@ def read stream&.close end - def write(mode = 'a+b') + def write(mode) stream = Gitlab::Ci::Trace::Stream.new do if current_path File.open(current_path, mode) diff --git a/lib/gitlab/ci/trace/chunked_io.rb b/lib/gitlab/ci/trace/chunked_io.rb index 2caebe3c95e6..6b4a9f61961f 100644 --- a/lib/gitlab/ci/trace/chunked_io.rb +++ b/lib/gitlab/ci/trace/chunked_io.rb @@ -114,7 +114,6 @@ def readline def write(data) start_pos = tell - data = data.force_encoding(Encoding::BINARY) while tell < start_pos + data.bytesize # get slice from current offset till the end where it falls into chunk @@ -178,7 +177,7 @@ def chunk_slice_from_offset current_chunk.tap do |chunk| raise FailedToGetChunkError unless chunk - @chunk = chunk.data.force_encoding(Encoding::BINARY) + @chunk = chunk.data @chunk_range = chunk.range end end diff --git a/lib/gitlab/ci/trace/stream.rb b/lib/gitlab/ci/trace/stream.rb index bcdd4225ce7b..e78bca4be998 100644 --- a/lib/gitlab/ci/trace/stream.rb +++ b/lib/gitlab/ci/trace/stream.rb @@ -37,6 +37,8 @@ def limit(last_bytes = LIMIT_SIZE) end def append(data, offset) + data = data.force_encoding(Encoding::BINARY) + 
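      # Normalizing to BINARY at the Stream boundary keeps the byte-offset
      # arithmetic below (truncate, seek, write) consistent with ChunkedIO,
      # which this commit switches to plain binary strings as well.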
stream.truncate(offset) stream.seek(0, IO::SEEK_END) stream.write(data) @@ -44,6 +46,8 @@ def append(data, offset) end def set(data) + data = data.force_encoding(Encoding::BINARY) + stream.seek(0, IO::SEEK_SET) stream.write(data) stream.truncate(data.bytesize) @@ -126,11 +130,11 @@ def reverse_line buf += debris debris, *lines = buf.each_line.to_a lines.reverse_each do |line| - yield(line.force_encoding('UTF-8')) + yield(line.force_encoding(Encoding.default_external)) end end - yield(debris.force_encoding('UTF-8')) unless debris.empty? + yield(debris.force_encoding(Encoding.default_external)) unless debris.empty? end def read_backward(length) -- GitLab From 7c90fd774d0950212d56d35c019ec2f48ed42162 Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Mon, 23 Apr 2018 14:26:04 +0900 Subject: [PATCH 58/86] Fix spec/features/projects/jobs_spec.rb --- spec/features/projects/jobs_spec.rb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/spec/features/projects/jobs_spec.rb b/spec/features/projects/jobs_spec.rb index f60a9e217b67..9d1c4cbad8bc 100644 --- a/spec/features/projects/jobs_spec.rb +++ b/spec/features/projects/jobs_spec.rb @@ -282,7 +282,7 @@ it 'loads job trace' do expect(page).to have_content 'BUILD TRACE' - job.trace.write do |stream| + job.trace.write('a+b') do |stream| stream.append(' and more trace', 11) end -- GitLab From bc43588ce99df167aa2c117a3c25e51e070c0f57 Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Mon, 23 Apr 2018 15:01:39 +0900 Subject: [PATCH 59/86] Chnage raw_data type to binary from text --- db/migrate/20180326202229_create_ci_job_trace_chunks.rb | 2 +- db/migrate/limits_ci_job_trace_chunks_raw_data_for_mysql.rb | 2 +- db/schema.rb | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/db/migrate/20180326202229_create_ci_job_trace_chunks.rb b/db/migrate/20180326202229_create_ci_job_trace_chunks.rb index abfaea9f54a1..23bc478eb298 100644 --- a/db/migrate/20180326202229_create_ci_job_trace_chunks.rb +++ b/db/migrate/20180326202229_create_ci_job_trace_chunks.rb @@ -8,7 +8,7 @@ def change t.integer :job_id, null: false t.integer :chunk_index, null: false t.integer :data_store, null: false - t.text :raw_data + t.binary :raw_data t.foreign_key :ci_builds, column: :job_id, on_delete: :cascade t.index [:job_id, :chunk_index], unique: true diff --git a/db/migrate/limits_ci_job_trace_chunks_raw_data_for_mysql.rb b/db/migrate/limits_ci_job_trace_chunks_raw_data_for_mysql.rb index 5e307ce73c90..38310b7443ae 100644 --- a/db/migrate/limits_ci_job_trace_chunks_raw_data_for_mysql.rb +++ b/db/migrate/limits_ci_job_trace_chunks_raw_data_for_mysql.rb @@ -4,6 +4,6 @@ def up # Mysql needs MEDIUMTEXT type (up to 16MB) rather than TEXT (up to 64KB) # Because 'raw_data' is always capped by Ci::JobTraceChunk::CHUNK_SIZE, which is 128KB - change_column :ci_job_trace_chunks, :raw_data, :text, limit: 16.megabytes - 1 #MEDIUMTEXT + change_column :ci_job_trace_chunks, :raw_data, :binary, limit: 16.megabytes - 1 #MEDIUMTEXT end end diff --git a/db/schema.rb b/db/schema.rb index 0b70a3ffbebf..6e459b2c2866 100644 --- a/db/schema.rb +++ b/db/schema.rb @@ -375,7 +375,7 @@ t.integer "job_id", null: false t.integer "chunk_index", null: false t.integer "data_store", null: false - t.text "raw_data" + t.binary "raw_data" end add_index "ci_job_trace_chunks", ["job_id", "chunk_index"], name: "index_ci_job_trace_chunks_on_job_id_and_chunk_index", unique: true, using: :btree -- GitLab From 8a7654a52b75fadedbb374ef87e04668bef0ee48 Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: 
Mon, 23 Apr 2018 15:20:55 +0900 Subject: [PATCH 60/86] Fix static analysis --- app/models/ci/job_trace_chunk.rb | 4 ++-- spec/lib/gitlab/ci/trace_spec.rb | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/app/models/ci/job_trace_chunk.rb b/app/models/ci/job_trace_chunk.rb index 371417e7ff57..aeab4d0c87c1 100644 --- a/app/models/ci/job_trace_chunk.rb +++ b/app/models/ci/job_trace_chunk.rb @@ -79,8 +79,8 @@ def range def use_database! in_lock do - return if db? - return unless size > 0 + break if db? + break unless size > 0 self.update!(raw_data: data, data_store: :db) redis_delete_data diff --git a/spec/lib/gitlab/ci/trace_spec.rb b/spec/lib/gitlab/ci/trace_spec.rb index d16590b63a9d..4d80b58889b6 100644 --- a/spec/lib/gitlab/ci/trace_spec.rb +++ b/spec/lib/gitlab/ci/trace_spec.rb @@ -535,7 +535,7 @@ context 'when job does not have trace artifact' do context 'when trace file stored in default path' do let(:build) { create(:ci_build, :success, :trace_live) } - let(:src_path) { trace.read { |s| return s.path } } + let(:src_path) { trace.read { |s| s.path } } let(:src_checksum) { Digest::SHA256.file(src_path).hexdigest } before do -- GitLab From ac6eb51b806823c9e953b30f537014a4b329828b Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Tue, 24 Apr 2018 14:16:42 +0900 Subject: [PATCH 61/86] Rename SwapTraceChunkWorker to BuildTraceSwapChunkWorker --- app/models/ci/job_trace_chunk.rb | 2 +- app/workers/all_queues.yml | 2 +- ...trace_chunk_worker.rb => build_trace_swap_chunk_worker.rb} | 2 +- spec/models/ci/job_trace_chunk_spec.rb | 4 ++-- 4 files changed, 5 insertions(+), 5 deletions(-) rename app/workers/{swap_trace_chunk_worker.rb => build_trace_swap_chunk_worker.rb} (89%) diff --git a/app/models/ci/job_trace_chunk.rb b/app/models/ci/job_trace_chunk.rb index aeab4d0c87c1..47302265fb7c 100644 --- a/app/models/ci/job_trace_chunk.rb +++ b/app/models/ci/job_trace_chunk.rb @@ -92,7 +92,7 @@ def use_database! def schedule_to_db return if db? - SwapTraceChunkWorker.perform_async(id) + BuildTraceSwapChunkWorker.perform_async(id) end def fullfilled?
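The BuildTraceSwapChunkWorker scheduled above (and renamed in the hunks that follow) keeps a defensive lookup on purpose: by the time Sidekiq picks the job up, the chunk row may already have been flushed and destroyed, so the id is resolved with find_by and a missing record becomes a silent no-op rather than an ActiveRecord::RecordNotFound retry loop. A condensed sketch of that flow, in which Sidekiq::Worker stands in for the ApplicationWorker/PipelineQueue plumbing of the real class:

# Condensed sketch of the swap worker's behaviour at this point in the series;
# the real class includes ApplicationWorker and PipelineQueue instead.
class ExampleSwapChunkWorker
  include Sidekiq::Worker

  def perform(chunk_id)
    # find_by returns nil rather than raising when the chunk is already gone,
    # and Object#try turns that nil into a safe no-op.
    Ci::JobTraceChunk.find_by(id: chunk_id).try do |chunk|
      chunk.use_database! # move the Redis-held bytes into the raw_data column
    end
  end
end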
diff --git a/app/workers/all_queues.yml b/app/workers/all_queues.yml index 571a9455325c..e02de6a0830b 100644 --- a/app/workers/all_queues.yml +++ b/app/workers/all_queues.yml @@ -65,7 +65,7 @@ - pipeline_processing:pipeline_update - pipeline_processing:stage_update - pipeline_processing:update_head_pipeline_for_merge_request -- pipeline_processing:swap_trace_chunk +- pipeline_processing:build_trace_swap_chunk - repository_check:repository_check_clear - repository_check:repository_check_single_repository diff --git a/app/workers/swap_trace_chunk_worker.rb b/app/workers/build_trace_swap_chunk_worker.rb similarity index 89% rename from app/workers/swap_trace_chunk_worker.rb rename to app/workers/build_trace_swap_chunk_worker.rb index 6b30cfa2a486..29b7cd4f808f 100644 --- a/app/workers/swap_trace_chunk_worker.rb +++ b/app/workers/build_trace_swap_chunk_worker.rb @@ -1,4 +1,4 @@ -class SwapTraceChunkWorker +class BuildTraceSwapChunkWorker include ApplicationWorker include PipelineQueue diff --git a/spec/models/ci/job_trace_chunk_spec.rb b/spec/models/ci/job_trace_chunk_spec.rb index eb240de188f9..b01dfd097e6f 100644 --- a/spec/models/ci/job_trace_chunk_spec.rb +++ b/spec/models/ci/job_trace_chunk_spec.rb @@ -71,7 +71,7 @@ let(:value) { 'a' * described_class::CHUNK_SIZE } it 'schedules stashing data' do - expect(SwapTraceChunkWorker).to receive(:perform_async).once + expect(BuildTraceSwapChunkWorker).to receive(:perform_async).once subject end @@ -108,7 +108,7 @@ context 'when fullfilled chunk size' do it 'does not schedule stashing data' do - expect(SwapTraceChunkWorker).not_to receive(:perform_async) + expect(BuildTraceSwapChunkWorker).not_to receive(:perform_async) subject end -- GitLab From 9d6fe7bfdf9ff3f68ee73baa0e3d0aa7df13c351 Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Thu, 26 Apr 2018 15:06:04 +0900 Subject: [PATCH 62/86] Refactoring ci_job_trace to ci_build_trace --- app/models/ci/build.rb | 3 +- ...ob_trace_chunk.rb => build_trace_chunk.rb} | 8 +- app/workers/build_trace_swap_chunk_worker.rb | 6 +- ...0326202229_create_ci_build_trace_chunks.rb | 17 ++++ ...180326202229_create_ci_job_trace_chunks.rb | 17 ---- ..._build_trace_chunks_raw_data_for_mysql.rb} | 6 +- ...i_build_trace_chunks_raw_data_for_mysql.rb | 9 ++ ..._ci_job_trace_chunks_raw_data_for_mysql.rb | 9 -- db/schema.rb | 22 ++--- lib/gitlab/ci/trace.rb | 12 +-- lib/gitlab/ci/trace/chunked_io.rb | 22 ++--- lib/tasks/migrate/add_limits_mysql.rake | 4 +- ..._trace_chunks.rb => build_trace_chunks.rb} | 2 +- spec/lib/gitlab/ci/trace/chunked_io_spec.rb | 50 +++++------ spec/lib/gitlab/ci/trace/stream_spec.rb | 16 ++-- spec/lib/gitlab/ci/trace_spec.rb | 4 +- ...hunk_spec.rb => build_trace_chunk_spec.rb} | 90 +++++++++---------- spec/support/chunked_io/chunked_io_helpers.rb | 2 +- 18 files changed, 149 insertions(+), 150 deletions(-) rename app/models/ci/{job_trace_chunk.rb => build_trace_chunk.rb} (92%) create mode 100644 db/migrate/20180326202229_create_ci_build_trace_chunks.rb delete mode 100644 db/migrate/20180326202229_create_ci_job_trace_chunks.rb rename db/migrate/{20180406204716_add_limits_ci_job_trace_chunks_raw_data_for_mysql.rb => 20180406204716_add_limits_ci_build_trace_chunks_raw_data_for_mysql.rb} (50%) create mode 100644 db/migrate/limits_ci_build_trace_chunks_raw_data_for_mysql.rb delete mode 100644 db/migrate/limits_ci_job_trace_chunks_raw_data_for_mysql.rb rename spec/factories/ci/{job_trace_chunks.rb => build_trace_chunks.rb} (60%) rename spec/models/ci/{job_trace_chunk_spec.rb => build_trace_chunk_spec.rb} 
(74%) diff --git a/app/models/ci/build.rb b/app/models/ci/build.rb index 56216093293d..61a0299d4fb6 100644 --- a/app/models/ci/build.rb +++ b/app/models/ci/build.rb @@ -19,14 +19,13 @@ class Build < CommitStatus has_one :last_deployment, -> { order('deployments.id DESC') }, as: :deployable, class_name: 'Deployment' has_many :trace_sections, class_name: 'Ci::BuildTraceSection' + has_many :trace_chunks, class_name: 'Ci::BuildTraceChunk', foreign_key: :build_id, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent has_many :job_artifacts, class_name: 'Ci::JobArtifact', foreign_key: :job_id, dependent: :destroy, inverse_of: :job # rubocop:disable Cop/ActiveRecordDependent has_one :job_artifacts_archive, -> { where(file_type: Ci::JobArtifact.file_types[:archive]) }, class_name: 'Ci::JobArtifact', inverse_of: :job, foreign_key: :job_id has_one :job_artifacts_metadata, -> { where(file_type: Ci::JobArtifact.file_types[:metadata]) }, class_name: 'Ci::JobArtifact', inverse_of: :job, foreign_key: :job_id has_one :job_artifacts_trace, -> { where(file_type: Ci::JobArtifact.file_types[:trace]) }, class_name: 'Ci::JobArtifact', inverse_of: :job, foreign_key: :job_id - has_many :chunks, class_name: 'Ci::JobTraceChunk', foreign_key: :job_id, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent - has_one :metadata, class_name: 'Ci::BuildMetadata' delegate :timeout, to: :metadata, prefix: true, allow_nil: true delegate :gitlab_deploy_token, to: :project diff --git a/app/models/ci/job_trace_chunk.rb b/app/models/ci/build_trace_chunk.rb similarity index 92% rename from app/models/ci/job_trace_chunk.rb rename to app/models/ci/build_trace_chunk.rb index 47302265fb7c..f3beb6d4156c 100644 --- a/app/models/ci/job_trace_chunk.rb +++ b/app/models/ci/build_trace_chunk.rb @@ -1,8 +1,8 @@ module Ci - class JobTraceChunk < ActiveRecord::Base + class BuildTraceChunk < ActiveRecord::Base extend Gitlab::Ci::Model - belongs_to :job, class_name: "Ci::Build", foreign_key: :job_id + belongs_to :build, class_name: "Ci::Build", foreign_key: :build_id after_destroy :redis_delete_data, if: :redis? @@ -118,11 +118,11 @@ def redis_delete_data end def redis_data_key - "gitlab:ci:trace:#{job_id}:chunks:#{chunk_index}:data" + "gitlab:ci:trace:#{build_id}:chunks:#{chunk_index}:data" end def redis_lock_key - "gitlab:ci:trace:#{job_id}:chunks:#{chunk_index}:lock" + "gitlab:ci:trace:#{build_id}:chunks:#{chunk_index}:lock" end def in_lock diff --git a/app/workers/build_trace_swap_chunk_worker.rb b/app/workers/build_trace_swap_chunk_worker.rb index 29b7cd4f808f..1392b9d34e1e 100644 --- a/app/workers/build_trace_swap_chunk_worker.rb +++ b/app/workers/build_trace_swap_chunk_worker.rb @@ -4,9 +4,9 @@ class BuildTraceSwapChunkWorker queue_namespace :pipeline_processing - def perform(job_trace_chunk_id) - Ci::JobTraceChunk.find_by(id: job_trace_chunk_id).try do |job_trace_chunk| - job_trace_chunk.use_database! + def perform(build_trace_chunk_id) + Ci::BuildTraceChunk.find_by(id: build_trace_chunk_id).try do |build_trace_chunk| + build_trace_chunk.use_database! 
end end end diff --git a/db/migrate/20180326202229_create_ci_build_trace_chunks.rb b/db/migrate/20180326202229_create_ci_build_trace_chunks.rb new file mode 100644 index 000000000000..fb3f5786e856 --- /dev/null +++ b/db/migrate/20180326202229_create_ci_build_trace_chunks.rb @@ -0,0 +1,17 @@ +class CreateCiBuildTraceChunks < ActiveRecord::Migration + include Gitlab::Database::MigrationHelpers + + DOWNTIME = false + + def change + create_table :ci_build_trace_chunks, id: :bigserial do |t| + t.integer :build_id, null: false + t.integer :chunk_index, null: false + t.integer :data_store, null: false + t.binary :raw_data + + t.foreign_key :ci_builds, column: :build_id, on_delete: :cascade + t.index [:build_id, :chunk_index], unique: true + end + end +end diff --git a/db/migrate/20180326202229_create_ci_job_trace_chunks.rb b/db/migrate/20180326202229_create_ci_job_trace_chunks.rb deleted file mode 100644 index 23bc478eb298..000000000000 --- a/db/migrate/20180326202229_create_ci_job_trace_chunks.rb +++ /dev/null @@ -1,17 +0,0 @@ -class CreateCiJobTraceChunks < ActiveRecord::Migration - include Gitlab::Database::MigrationHelpers - - DOWNTIME = false - - def change - create_table :ci_job_trace_chunks, id: :bigserial do |t| - t.integer :job_id, null: false - t.integer :chunk_index, null: false - t.integer :data_store, null: false - t.binary :raw_data - - t.foreign_key :ci_builds, column: :job_id, on_delete: :cascade - t.index [:job_id, :chunk_index], unique: true - end - end -end diff --git a/db/migrate/20180406204716_add_limits_ci_job_trace_chunks_raw_data_for_mysql.rb b/db/migrate/20180406204716_add_limits_ci_build_trace_chunks_raw_data_for_mysql.rb similarity index 50% rename from db/migrate/20180406204716_add_limits_ci_job_trace_chunks_raw_data_for_mysql.rb rename to db/migrate/20180406204716_add_limits_ci_build_trace_chunks_raw_data_for_mysql.rb index e7343db7da0d..0f2734853e6b 100644 --- a/db/migrate/20180406204716_add_limits_ci_job_trace_chunks_raw_data_for_mysql.rb +++ b/db/migrate/20180406204716_add_limits_ci_build_trace_chunks_raw_data_for_mysql.rb @@ -1,13 +1,13 @@ # See http://doc.gitlab.com/ce/development/migration_style_guide.html # for more information on how to write migrations for GitLab. -require Rails.root.join('db/migrate/limits_ci_job_trace_chunks_raw_data_for_mysql') +require Rails.root.join('db/migrate/limits_ci_build_trace_chunks_raw_data_for_mysql') -class AddLimitsCiJobTraceChunksRawDataForMysql < ActiveRecord::Migration +class AddLimitsCiBuildTraceChunksRawDataForMysql < ActiveRecord::Migration include Gitlab::Database::MigrationHelpers DOWNTIME = false def up - LimitsCiJobTraceChunksRawDataForMysql.new.up + LimitsCiBuildTraceChunksRawDataForMysql.new.up end end diff --git a/db/migrate/limits_ci_build_trace_chunks_raw_data_for_mysql.rb b/db/migrate/limits_ci_build_trace_chunks_raw_data_for_mysql.rb new file mode 100644 index 000000000000..e1771912c3cd --- /dev/null +++ b/db/migrate/limits_ci_build_trace_chunks_raw_data_for_mysql.rb @@ -0,0 +1,9 @@ +class LimitsCiBuildTraceChunksRawDataForMysql < ActiveRecord::Migration + def up + return unless Gitlab::Database.mysql? 
+ + # Mysql needs MEDIUMTEXT type (up to 16MB) rather than TEXT (up to 64KB) + # Because 'raw_data' is always capped by Ci::BuildTraceChunk::CHUNK_SIZE, which is 128KB + change_column :ci_build_trace_chunks, :raw_data, :binary, limit: 16.megabytes - 1 #MEDIUMTEXT + end +end diff --git a/db/migrate/limits_ci_job_trace_chunks_raw_data_for_mysql.rb b/db/migrate/limits_ci_job_trace_chunks_raw_data_for_mysql.rb deleted file mode 100644 index 38310b7443ae..000000000000 --- a/db/migrate/limits_ci_job_trace_chunks_raw_data_for_mysql.rb +++ /dev/null @@ -1,9 +0,0 @@ -class LimitsCiJobTraceChunksRawDataForMysql < ActiveRecord::Migration - def up - return unless Gitlab::Database.mysql? - - # Mysql needs MEDIUMTEXT type (up to 16MB) rather than TEXT (up to 64KB) - # Because 'raw_data' is always capped by Ci::JobTraceChunk::CHUNK_SIZE, which is 128KB - change_column :ci_job_trace_chunks, :raw_data, :binary, limit: 16.megabytes - 1 #MEDIUMTEXT - end -end diff --git a/db/schema.rb b/db/schema.rb index 19ae9cbf4439..06232f688945 100644 --- a/db/schema.rb +++ b/db/schema.rb @@ -11,7 +11,7 @@ # # It's strongly recommended that you check this file into your version control system. -ActiveRecord::Schema.define(version: 20180425131009) do +ActiveRecord::Schema.define(version: 20180425205249) do # These are extensions that must be enabled in order to support this database enable_extension "plpgsql" @@ -246,6 +246,15 @@ add_index "chat_teams", ["namespace_id"], name: "index_chat_teams_on_namespace_id", unique: true, using: :btree + create_table "ci_build_trace_chunks", id: :bigserial, force: :cascade do |t| + t.integer "build_id", null: false + t.integer "chunk_index", null: false + t.integer "data_store", null: false + t.binary "raw_data" + end + + add_index "ci_build_trace_chunks", ["build_id", "chunk_index"], name: "index_ci_build_trace_chunks_on_build_id_and_chunk_index", unique: true, using: :btree + create_table "ci_build_trace_section_names", force: :cascade do |t| t.integer "project_id", null: false t.string "name", null: false @@ -371,15 +380,6 @@ add_index "ci_job_artifacts", ["job_id", "file_type"], name: "index_ci_job_artifacts_on_job_id_and_file_type", unique: true, using: :btree add_index "ci_job_artifacts", ["project_id"], name: "index_ci_job_artifacts_on_project_id", using: :btree - create_table "ci_job_trace_chunks", id: :bigserial, force: :cascade do |t| - t.integer "job_id", null: false - t.integer "chunk_index", null: false - t.integer "data_store", null: false - t.binary "raw_data" - end - - add_index "ci_job_trace_chunks", ["job_id", "chunk_index"], name: "index_ci_job_trace_chunks_on_job_id_and_chunk_index", unique: true, using: :btree - create_table "ci_pipeline_schedule_variables", force: :cascade do |t| t.string "key", null: false t.text "value" @@ -2075,6 +2075,7 @@ add_foreign_key "boards", "namespaces", column: "group_id", on_delete: :cascade add_foreign_key "boards", "projects", name: "fk_f15266b5f9", on_delete: :cascade add_foreign_key "chat_teams", "namespaces", on_delete: :cascade + add_foreign_key "ci_build_trace_chunks", "ci_builds", column: "build_id", on_delete: :cascade add_foreign_key "ci_build_trace_section_names", "projects", on_delete: :cascade add_foreign_key "ci_build_trace_sections", "ci_build_trace_section_names", column: "section_name_id", name: "fk_264e112c66", on_delete: :cascade add_foreign_key "ci_build_trace_sections", "ci_builds", column: "build_id", name: "fk_4ebe41f502", on_delete: :cascade @@ -2087,7 +2088,6 @@ add_foreign_key "ci_group_variables", 
"namespaces", column: "group_id", name: "fk_33ae4d58d8", on_delete: :cascade add_foreign_key "ci_job_artifacts", "ci_builds", column: "job_id", on_delete: :cascade add_foreign_key "ci_job_artifacts", "projects", on_delete: :cascade - add_foreign_key "ci_job_trace_chunks", "ci_builds", column: "job_id", on_delete: :cascade add_foreign_key "ci_pipeline_schedule_variables", "ci_pipeline_schedules", column: "pipeline_schedule_id", name: "fk_41c35fda51", on_delete: :cascade add_foreign_key "ci_pipeline_schedules", "projects", name: "fk_8ead60fcc4", on_delete: :cascade add_foreign_key "ci_pipeline_schedules", "users", column: "owner_id", name: "fk_9ea99f58d2", on_delete: :nullify diff --git a/lib/gitlab/ci/trace.rb b/lib/gitlab/ci/trace.rb index 79838cabdb57..65c8b9118c67 100644 --- a/lib/gitlab/ci/trace.rb +++ b/lib/gitlab/ci/trace.rb @@ -54,14 +54,14 @@ def append(data, offset) end def exist? - trace_artifact&.exists? || job.chunks.any? || current_path.present? || old_trace.present? + trace_artifact&.exists? || job.trace_chunks.any? || current_path.present? || old_trace.present? end def read stream = Gitlab::Ci::Trace::Stream.new do if trace_artifact trace_artifact.open - elsif job.chunks.any? + elsif job.trace_chunks.any? Gitlab::Ci::Trace::ChunkedIO.new(job) elsif current_path File.open(current_path, "rb") @@ -100,7 +100,7 @@ def erase! FileUtils.rm(trace_path, force: true) end - job.chunks.destroy_all + job.trace_chunks.destroy_all job.erase_old_trace! end @@ -108,7 +108,7 @@ def archive! raise ArchiveError, 'Already archived' if trace_artifact raise ArchiveError, 'Job is not finished yet' unless job.complete? - if job.chunks.any? + if job.trace_chunks.any? Gitlab::Ci::Trace::ChunkedIO.new(job) do |stream| archive_stream!(stream) stream.destroy! @@ -130,7 +130,7 @@ def archive! def archive_stream!(stream) clone_file!(stream, JobArtifactUploader.workhorse_upload_path) do |clone_path| - create_job_trace!(job, clone_path) + create_build_trace!(job, clone_path) end end @@ -146,7 +146,7 @@ def clone_file!(src_stream, temp_dir) end end - def create_job_trace!(job, path) + def create_build_trace!(job, path) File.open(path) do |stream| job.create_job_artifacts_trace!( project: job.project, diff --git a/lib/gitlab/ci/trace/chunked_io.rb b/lib/gitlab/ci/trace/chunked_io.rb index 6b4a9f61961f..877be08b219e 100644 --- a/lib/gitlab/ci/trace/chunked_io.rb +++ b/lib/gitlab/ci/trace/chunked_io.rb @@ -5,18 +5,18 @@ module Gitlab module Ci class Trace class ChunkedIO - CHUNK_SIZE = ::Ci::JobTraceChunk::CHUNK_SIZE + CHUNK_SIZE = ::Ci::BuildTraceChunk::CHUNK_SIZE FailedToGetChunkError = Class.new(StandardError) - attr_reader :job + attr_reader :build attr_reader :tell, :size attr_reader :chunk, :chunk_range alias_method :pos, :tell - def initialize(job, &block) - @job = job + def initialize(build, &block) + @build = build @chunks_cache = [] @tell = 0 @size = calculate_size @@ -140,7 +140,7 @@ def truncate(offset) @size = offset # remove all next chunks - job_chunks.where('chunk_index > ?', chunk_index).destroy_all + trace_chunks.where('chunk_index > ?', chunk_index).destroy_all # truncate current chunk current_chunk.truncate(chunk_offset) if chunk_offset != 0 @@ -157,7 +157,7 @@ def present? end def destroy! 
- job_chunks.destroy_all + trace_chunks.destroy_all @tell = @size = 0 ensure invalidate_chunk_cache @@ -206,23 +206,23 @@ def invalidate_chunk_cache end def current_chunk - @chunks_cache[chunk_index] ||= job_chunks.find_by(chunk_index: chunk_index) + @chunks_cache[chunk_index] ||= trace_chunks.find_by(chunk_index: chunk_index) end def build_chunk - @chunks_cache[chunk_index] = ::Ci::JobTraceChunk.new(job: job, chunk_index: chunk_index) + @chunks_cache[chunk_index] = ::Ci::BuildTraceChunk.new(build: build, chunk_index: chunk_index) end def ensure_chunk current_chunk || build_chunk end - def job_chunks - ::Ci::JobTraceChunk.where(job: job) + def trace_chunks + ::Ci::BuildTraceChunk.where(build: build) end def calculate_size - job_chunks.order(chunk_index: :desc).first.try(&:end_offset).to_i + trace_chunks.order(chunk_index: :desc).first.try(&:end_offset).to_i end end end diff --git a/lib/tasks/migrate/add_limits_mysql.rake b/lib/tasks/migrate/add_limits_mysql.rake index 3cdcdcdf8748..c6204f89de4c 100644 --- a/lib/tasks/migrate/add_limits_mysql.rake +++ b/lib/tasks/migrate/add_limits_mysql.rake @@ -1,7 +1,7 @@ require Rails.root.join('db/migrate/limits_to_mysql') require Rails.root.join('db/migrate/markdown_cache_limits_to_mysql') require Rails.root.join('db/migrate/merge_request_diff_file_limits_to_mysql') -require Rails.root.join('db/migrate/limits_ci_job_trace_chunks_raw_data_for_mysql') +require Rails.root.join('db/migrate/limits_ci_build_trace_chunks_raw_data_for_mysql') desc "GitLab | Add limits to strings in mysql database" task add_limits_mysql: :environment do @@ -9,5 +9,5 @@ task add_limits_mysql: :environment do LimitsToMysql.new.up MarkdownCacheLimitsToMysql.new.up MergeRequestDiffFileLimitsToMysql.new.up - LimitsCiJobTraceChunksRawDataForMysql.new.up + LimitsCiBuildTraceChunksRawDataForMysql.new.up end diff --git a/spec/factories/ci/job_trace_chunks.rb b/spec/factories/ci/build_trace_chunks.rb similarity index 60% rename from spec/factories/ci/job_trace_chunks.rb rename to spec/factories/ci/build_trace_chunks.rb index e2cc2e77dda1..be13a84a47c0 100644 --- a/spec/factories/ci/job_trace_chunks.rb +++ b/spec/factories/ci/build_trace_chunks.rb @@ -1,5 +1,5 @@ FactoryBot.define do - factory :ci_job_trace_chunk, class: Ci::JobTraceChunk do + factory :ci_build_trace_chunk, class: Ci::BuildTraceChunk do job factory: :ci_build chunk_index 0 data_store :redis diff --git a/spec/lib/gitlab/ci/trace/chunked_io_spec.rb b/spec/lib/gitlab/ci/trace/chunked_io_spec.rb index bcef21d5f71f..6259b952adda 100644 --- a/spec/lib/gitlab/ci/trace/chunked_io_spec.rb +++ b/spec/lib/gitlab/ci/trace/chunked_io_spec.rb @@ -3,8 +3,8 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do include ChunkedIOHelpers - set(:job) { create(:ci_build, :running) } - let(:chunked_io) { described_class.new(job) } + set(:build) { create(:ci_build, :running) } + let(:chunked_io) { described_class.new(build) } before do stub_feature_flags(ci_enable_live_trace: true) @@ -13,7 +13,7 @@ context "#initialize" do context 'when a chunk exists' do before do - job.trace.set('ABC') + build.trace.set('ABC') end it { expect(chunked_io.size).to eq(3) } @@ -22,7 +22,7 @@ context 'when two chunks exist' do before do stub_buffer_size(4) - job.trace.set('ABCDEF') + build.trace.set('ABCDEF') end it { expect(chunked_io.size).to eq(6) } @@ -37,7 +37,7 @@ subject { chunked_io.seek(pos, where) } before do - job.trace.set(sample_trace_raw) + build.trace.set(sample_trace_raw) end context 'when moves pos to end of the file' do @@ 
-68,7 +68,7 @@ subject { chunked_io.eof? } before do - job.trace.set(sample_trace_raw) + build.trace.set(sample_trace_raw) end context 'when current pos is at end of the file' do @@ -94,7 +94,7 @@ context 'when buffer size is smaller than file size' do before do stub_buffer_size(sample_trace_raw.bytesize / 2) - job.trace.set(sample_trace_raw) + build.trace.set(sample_trace_raw) end it 'yields lines' do @@ -106,7 +106,7 @@ context 'when buffer size is larger than file size' do before do stub_buffer_size(sample_trace_raw.bytesize * 2) - job.trace.set(sample_trace_raw) + build.trace.set(sample_trace_raw) end it 'calls get_chunk only once' do @@ -127,7 +127,7 @@ context 'when buffer size is smaller than file size' do before do stub_buffer_size(sample_trace_raw.bytesize / 2) - job.trace.set(sample_trace_raw) + build.trace.set(sample_trace_raw) end it { is_expected.to eq(sample_trace_raw) } @@ -136,7 +136,7 @@ context 'when buffer size is larger than file size' do before do stub_buffer_size(sample_trace_raw.bytesize * 2) - job.trace.set(sample_trace_raw) + build.trace.set(sample_trace_raw) end it { is_expected.to eq(sample_trace_raw) } @@ -149,7 +149,7 @@ context 'when buffer size is smaller than file size' do before do stub_buffer_size(sample_trace_raw.bytesize / 2) - job.trace.set(sample_trace_raw) + build.trace.set(sample_trace_raw) end it 'reads a trace' do @@ -160,7 +160,7 @@ context 'when buffer size is larger than file size' do before do stub_buffer_size(sample_trace_raw.bytesize * 2) - job.trace.set(sample_trace_raw) + build.trace.set(sample_trace_raw) end it 'reads a trace' do @@ -175,7 +175,7 @@ context 'when buffer size is smaller than file size' do before do stub_buffer_size(sample_trace_raw.bytesize / 2) - job.trace.set(sample_trace_raw) + build.trace.set(sample_trace_raw) end it 'reads a trace' do @@ -186,7 +186,7 @@ context 'when buffer size is larger than file size' do before do stub_buffer_size(sample_trace_raw.bytesize * 2) - job.trace.set(sample_trace_raw) + build.trace.set(sample_trace_raw) end it 'reads a trace' do @@ -201,7 +201,7 @@ context 'when buffer size is smaller than file size' do before do stub_buffer_size(sample_trace_raw.bytesize / 2) - job.trace.set(sample_trace_raw) + build.trace.set(sample_trace_raw) end it 'reads a trace' do @@ -212,7 +212,7 @@ context 'when buffer size is larger than file size' do before do stub_buffer_size(sample_trace_raw.bytesize * 2) - job.trace.set(sample_trace_raw) + build.trace.set(sample_trace_raw) end it 'reads a trace' do @@ -238,7 +238,7 @@ context 'when buffer size is smaller than file size' do before do stub_buffer_size(sample_trace_raw.bytesize / 2) - job.trace.set(sample_trace_raw) + build.trace.set(sample_trace_raw) end it_behaves_like 'all line matching' @@ -247,7 +247,7 @@ context 'when buffer size is larger than file size' do before do stub_buffer_size(sample_trace_raw.bytesize * 2) - job.trace.set(sample_trace_raw) + build.trace.set(sample_trace_raw) end it_behaves_like 'all line matching' @@ -256,7 +256,7 @@ context 'when pos is at middle of the file' do before do stub_buffer_size(sample_trace_raw.bytesize / 2) - job.trace.set(sample_trace_raw) + build.trace.set(sample_trace_raw) chunked_io.seek(chunked_io.size / 2) string_io.seek(string_io.size / 2) @@ -316,7 +316,7 @@ context 'when buffer size is smaller than file size' do before do stub_buffer_size(sample_trace_raw.bytesize / 2) - job.trace.set(exist_data) + build.trace.set(exist_data) end it_behaves_like 'appends a trace' @@ -325,7 +325,7 @@ context 'when buffer size 
is larger than file size' do before do stub_buffer_size(sample_trace_raw.bytesize * 2) - job.trace.set(exist_data) + build.trace.set(exist_data) end it_behaves_like 'appends a trace' @@ -349,7 +349,7 @@ context 'when buffer size is smaller than file size' do before do stub_buffer_size(sample_trace_raw.bytesize / 2) - job.trace.set(sample_trace_raw) + build.trace.set(sample_trace_raw) end it_behaves_like 'truncates a trace' @@ -358,7 +358,7 @@ context 'when buffer size is larger than file size' do before do stub_buffer_size(sample_trace_raw.bytesize * 2) - job.trace.set(sample_trace_raw) + build.trace.set(sample_trace_raw) end it_behaves_like 'truncates a trace' @@ -370,14 +370,14 @@ subject { chunked_io.destroy! } before do - job.trace.set(sample_trace_raw) + build.trace.set(sample_trace_raw) end it 'deletes' do expect { subject }.to change { chunked_io.size } .from(sample_trace_raw.bytesize).to(0) - expect(Ci::JobTraceChunk.where(job: job).count).to eq(0) + expect(Ci::BuildTraceChunk.where(build: build).count).to eq(0) end end end diff --git a/spec/lib/gitlab/ci/trace/stream_spec.rb b/spec/lib/gitlab/ci/trace/stream_spec.rb index 7b6f5bf83ff9..4f49958dd33d 100644 --- a/spec/lib/gitlab/ci/trace/stream_spec.rb +++ b/spec/lib/gitlab/ci/trace/stream_spec.rb @@ -1,7 +1,7 @@ require 'spec_helper' describe Gitlab::Ci::Trace::Stream, :clean_gitlab_redis_cache do - set(:job) { create(:ci_build, :running) } + set(:build) { create(:ci_build, :running) } before do stub_feature_flags(ci_enable_live_trace: true) @@ -83,7 +83,7 @@ context 'when stream is ChunkedIO' do let(:stream) do described_class.new do - Gitlab::Ci::Trace::ChunkedIO.new(job).tap do |chunked_io| + Gitlab::Ci::Trace::ChunkedIO.new(build).tap do |chunked_io| chunked_io.write((1..8).to_a.join("\n")) chunked_io.seek(0, IO::SEEK_SET) end @@ -137,7 +137,7 @@ context 'when stream is ChunkedIO' do let(:stream) do described_class.new do - Gitlab::Ci::Trace::ChunkedIO.new(job).tap do |chunked_io| + Gitlab::Ci::Trace::ChunkedIO.new(build).tap do |chunked_io| chunked_io.write('12345678') chunked_io.seek(0, IO::SEEK_SET) end @@ -175,7 +175,7 @@ context 'when stream is ChunkedIO' do let(:stream) do described_class.new do - Gitlab::Ci::Trace::ChunkedIO.new(job).tap do |chunked_io| + Gitlab::Ci::Trace::ChunkedIO.new(build).tap do |chunked_io| chunked_io.write('12345678') chunked_io.seek(0, IO::SEEK_SET) end @@ -234,7 +234,7 @@ context 'when stream is ChunkedIO' do let(:stream) do described_class.new do - Gitlab::Ci::Trace::ChunkedIO.new(job).tap do |chunked_io| + Gitlab::Ci::Trace::ChunkedIO.new(build).tap do |chunked_io| chunked_io.write(File.binread(path)) chunked_io.seek(0, IO::SEEK_SET) end @@ -283,7 +283,7 @@ context 'when stream is ChunkedIO' do let(:stream) do described_class.new do - Gitlab::Ci::Trace::ChunkedIO.new(job).tap do |chunked_io| + Gitlab::Ci::Trace::ChunkedIO.new(build).tap do |chunked_io| chunked_io.write("1234") chunked_io.seek(0, IO::SEEK_SET) end @@ -318,7 +318,7 @@ context 'when stream is ChunkedIO' do let(:stream) do described_class.new do - Gitlab::Ci::Trace::ChunkedIO.new(job).tap do |chunked_io| + Gitlab::Ci::Trace::ChunkedIO.new(build).tap do |chunked_io| chunked_io.write("12\n34\n56") chunked_io.seek(0, IO::SEEK_SET) end @@ -473,7 +473,7 @@ context 'when stream is ChunkedIO' do let(:stream) do described_class.new do - Gitlab::Ci::Trace::ChunkedIO.new(job).tap do |chunked_io| + Gitlab::Ci::Trace::ChunkedIO.new(build).tap do |chunked_io| chunked_io.write(data) chunked_io.seek(0, IO::SEEK_SET) end diff --git 
a/spec/lib/gitlab/ci/trace_spec.rb b/spec/lib/gitlab/ci/trace_spec.rb index 4d80b58889b6..faa23461a915 100644 --- a/spec/lib/gitlab/ci/trace_spec.rb +++ b/spec/lib/gitlab/ci/trace_spec.rb @@ -436,7 +436,7 @@ it "can be erased" do trace.erase! expect(trace.exist?).to be(false) - expect(Ci::JobTraceChunk.where(job: build)).not_to be_exist + expect(Ci::BuildTraceChunk.where(build: build)).not_to be_exist end it "returns live trace data" do @@ -512,7 +512,7 @@ expect(build.trace.exist?).to be_truthy expect(build.job_artifacts_trace.file.exists?).to be_truthy expect(build.job_artifacts_trace.file.filename).to eq('job.log') - expect(Ci::JobTraceChunk.where(job: build)).not_to be_exist + expect(Ci::BuildTraceChunk.where(build: build)).not_to be_exist expect(src_checksum) .to eq(Digest::SHA256.file(build.job_artifacts_trace.file.path).hexdigest) expect(build.job_artifacts_trace.file_sha256).to eq(src_checksum) diff --git a/spec/models/ci/job_trace_chunk_spec.rb b/spec/models/ci/build_trace_chunk_spec.rb similarity index 74% rename from spec/models/ci/job_trace_chunk_spec.rb rename to spec/models/ci/build_trace_chunk_spec.rb index b01dfd097e6f..988e23146d24 100644 --- a/spec/models/ci/job_trace_chunk_spec.rb +++ b/spec/models/ci/build_trace_chunk_spec.rb @@ -1,13 +1,13 @@ require 'spec_helper' -describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do - set(:job) { create(:ci_build, :running) } +describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do + set(:build) { create(:ci_build, :running) } let(:chunk_index) { 0 } let(:data_store) { :redis } let(:raw_data) { nil } - - let(:job_trace_chunk) do - described_class.new(job: job, chunk_index: chunk_index, data_store: data_store, raw_data: raw_data) + + let(:build_trace_chunk) do + described_class.new(build: build, chunk_index: chunk_index, data_store: data_store, raw_data: raw_data) end describe 'CHUNK_SIZE' do @@ -17,13 +17,13 @@ end describe '#data' do - subject { job_trace_chunk.data } + subject { build_trace_chunk.data } context 'when data_store is redis' do let(:data_store) { :redis } before do - job_trace_chunk.send(:redis_set_data, 'Sample data in redis') + build_trace_chunk.send(:redis_set_data, 'Sample data in redis') end it { is_expected.to eq('Sample data in redis') } @@ -38,7 +38,7 @@ context 'when data_store is others' do before do - job_trace_chunk.send(:write_attribute, :data_store, -1) + build_trace_chunk.send(:write_attribute, :data_store, -1) end it { expect { subject }.to raise_error('Unsupported data store') } @@ -46,7 +46,7 @@ end describe '#set_data' do - subject { job_trace_chunk.set_data(value) } + subject { build_trace_chunk.set_data(value) } let(:value) { 'Sample data' } @@ -60,11 +60,11 @@ let(:data_store) { :redis } it do - expect(job_trace_chunk.send(:redis_data)).to be_nil + expect(build_trace_chunk.send(:redis_data)).to be_nil subject - expect(job_trace_chunk.send(:redis_data)).to eq(value) + expect(build_trace_chunk.send(:redis_data)).to eq(value) end context 'when fullfilled chunk size' do @@ -82,26 +82,26 @@ let(:data_store) { :db } it 'sets data' do - expect(job_trace_chunk.raw_data).to be_nil + expect(build_trace_chunk.raw_data).to be_nil subject - expect(job_trace_chunk.raw_data).to eq(value) - expect(job_trace_chunk.persisted?).to be_truthy + expect(build_trace_chunk.raw_data).to eq(value) + expect(build_trace_chunk.persisted?).to be_truthy end context 'when raw_data is not changed' do it 'does not execute UPDATE' do - expect(job_trace_chunk.raw_data).to be_nil - job_trace_chunk.save! 
+ expect(build_trace_chunk.raw_data).to be_nil + build_trace_chunk.save! # First set expect(ActiveRecord::QueryRecorder.new { subject }.count).to be > 0 - expect(job_trace_chunk.raw_data).to eq(value) - expect(job_trace_chunk.persisted?).to be_truthy + expect(build_trace_chunk.raw_data).to eq(value) + expect(build_trace_chunk.persisted?).to be_truthy # Second set - job_trace_chunk.reload + build_trace_chunk.reload expect(ActiveRecord::QueryRecorder.new { subject }.count).to be(0) end end @@ -117,7 +117,7 @@ context 'when data_store is others' do before do - job_trace_chunk.send(:write_attribute, :data_store, -1) + build_trace_chunk.send(:write_attribute, :data_store, -1) end it { expect { subject }.to raise_error('Unsupported data store') } @@ -125,7 +125,7 @@ end describe '#truncate' do - subject { job_trace_chunk.truncate(offset) } + subject { build_trace_chunk.truncate(offset) } shared_examples_for 'truncates' do context 'when offset is negative' do @@ -146,7 +146,7 @@ it 'truncates' do subject - expect(job_trace_chunk.data).to eq(data.byteslice(0, offset)) + expect(build_trace_chunk.data).to eq(data.byteslice(0, offset)) end end end @@ -156,7 +156,7 @@ let(:data) { 'Sample data in redis' } before do - job_trace_chunk.send(:redis_set_data, data) + build_trace_chunk.send(:redis_set_data, data) end it_behaves_like 'truncates' @@ -172,7 +172,7 @@ end describe '#append' do - subject { job_trace_chunk.append(new_data, offset) } + subject { build_trace_chunk.append(new_data, offset) } let(:new_data) { 'Sample new data' } let(:offset) { 0 } @@ -203,7 +203,7 @@ it 'appends' do subject - expect(job_trace_chunk.data).to eq(total_data) + expect(build_trace_chunk.data).to eq(total_data) end end @@ -213,7 +213,7 @@ it 'appends' do subject - expect(job_trace_chunk.data).to eq(data.byteslice(0, offset) + new_data) + expect(build_trace_chunk.data).to eq(data.byteslice(0, offset) + new_data) end end end @@ -223,7 +223,7 @@ let(:data) { 'Sample data in redis' } before do - job_trace_chunk.send(:redis_set_data, data) + build_trace_chunk.send(:redis_set_data, data) end it_behaves_like 'appends' @@ -239,7 +239,7 @@ end describe '#size' do - subject { job_trace_chunk.size } + subject { build_trace_chunk.size } context 'when data_store is redis' do let(:data_store) { :redis } @@ -248,7 +248,7 @@ let(:data) { 'Sample data in redis' } before do - job_trace_chunk.send(:redis_set_data, data) + build_trace_chunk.send(:redis_set_data, data) end it { is_expected.to eq(data.bytesize) } @@ -276,7 +276,7 @@ end describe '#use_database!' do - subject { job_trace_chunk.use_database! } + subject { build_trace_chunk.use_database! 
} context 'when data_store is redis' do let(:data_store) { :redis } @@ -285,19 +285,19 @@ let(:data) { 'Sample data in redis' } before do - job_trace_chunk.send(:redis_set_data, data) + build_trace_chunk.send(:redis_set_data, data) end it 'stashes the data' do - expect(job_trace_chunk.data_store).to eq('redis') - expect(job_trace_chunk.send(:redis_data)).to eq(data) - expect(job_trace_chunk.raw_data).to be_nil + expect(build_trace_chunk.data_store).to eq('redis') + expect(build_trace_chunk.send(:redis_data)).to eq(data) + expect(build_trace_chunk.raw_data).to be_nil subject - expect(job_trace_chunk.data_store).to eq('db') - expect(job_trace_chunk.send(:redis_data)).to be_nil - expect(job_trace_chunk.raw_data).to eq(data) + expect(build_trace_chunk.data_store).to eq('db') + expect(build_trace_chunk.send(:redis_data)).to be_nil + expect(build_trace_chunk.raw_data).to eq(data) end end @@ -320,11 +320,11 @@ describe 'ExclusiveLock' do before do allow_any_instance_of(Gitlab::ExclusiveLease).to receive(:try_obtain) { nil } - stub_const('Ci::JobTraceChunk::LOCK_RETRY', 1) + stub_const('Ci::BuildTraceChunk::LOCK_RETRY', 1) end it 'raise an error' do - expect { job_trace_chunk.append('ABC', 0) }.to raise_error('Failed to obtain write lock') + expect { build_trace_chunk.append('ABC', 0) }.to raise_error('Failed to obtain write lock') end end @@ -338,7 +338,7 @@ create(:ci_build, :running, :trace_live, pipeline: pipeline, project: project) end - shared_examples_for 'deletes all job_trace_chunk and data in redis' do + shared_examples_for 'deletes all build_trace_chunk and data in redis' do it do project.builds.each do |build| Gitlab::Redis::SharedState.with do |redis| @@ -364,20 +364,20 @@ end end - context 'when job_trace_chunk is destroyed' do + context 'when build_trace_chunk is destroyed' do let(:subject) do project.builds.each { |build| build.chunks.destroy_all } end - it_behaves_like 'deletes all job_trace_chunk and data in redis' + it_behaves_like 'deletes all build_trace_chunk and data in redis' end - context 'when job is destroyed' do + context 'when build is destroyed' do let(:subject) do project.builds.destroy_all end - it_behaves_like 'deletes all job_trace_chunk and data in redis' + it_behaves_like 'deletes all build_trace_chunk and data in redis' end context 'when project is destroyed' do @@ -385,7 +385,7 @@ project.destroy! 
end - it_behaves_like 'deletes all job_trace_chunk and data in redis' + it_behaves_like 'deletes all build_trace_chunk and data in redis' end end end diff --git a/spec/support/chunked_io/chunked_io_helpers.rb b/spec/support/chunked_io/chunked_io_helpers.rb index 57391e6d42aa..4238a4b3e947 100644 --- a/spec/support/chunked_io/chunked_io_helpers.rb +++ b/spec/support/chunked_io/chunked_io_helpers.rb @@ -5,7 +5,7 @@ def sample_trace_raw end def stub_buffer_size(size) - stub_const('Ci::JobTraceChunk::CHUNK_SIZE', size) + stub_const('Ci::BuildTraceChunk::CHUNK_SIZE', size) stub_const('Gitlab::Ci::Trace::ChunkedIO::CHUNK_SIZE', size) end -- GitLab From c588dd843a6c68d0c10e5c447bfbe77ad7f3d9ea Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Thu, 26 Apr 2018 15:18:08 +0900 Subject: [PATCH 63/86] Rename ExclusiveLease for trace write locking --- app/models/ci/build_trace_chunk.rb | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/app/models/ci/build_trace_chunk.rb b/app/models/ci/build_trace_chunk.rb index f3beb6d4156c..794db48e1dce 100644 --- a/app/models/ci/build_trace_chunk.rb +++ b/app/models/ci/build_trace_chunk.rb @@ -12,9 +12,9 @@ class BuildTraceChunk < ActiveRecord::Base CHUNK_SIZE = 128.kilobytes CHUNK_REDIS_TTL = 1.week - LOCK_RETRY = 100 - LOCK_SLEEP = 1 - LOCK_TTL = 5.minutes + WRITE_LOCK_RETRY = 100 + WRITE_LOCK_SLEEP = 1 + WRITE_LOCK_TTL = 5.minutes enum data_store: { redis: 1, @@ -122,18 +122,18 @@ def redis_data_key end def redis_lock_key - "gitlab:ci:trace:#{build_id}:chunks:#{chunk_index}:lock" + "trace_write:#{build_id}:chunks:#{chunk_index}" end def in_lock - lease = Gitlab::ExclusiveLease.new(redis_lock_key, timeout: LOCK_TTL) + lease = Gitlab::ExclusiveLease.new(redis_lock_key, timeout: WRITE_LOCK_TTL) retry_count = 0 until uuid = lease.try_obtain # Keep trying until we obtain the lease. To prevent hammering Redis too # much we'll wait for a bit between retries. - sleep(LOCK_SLEEP) - break if LOCK_RETRY < (retry_count += 1) + sleep(WRITE_LOCK_SLEEP) + break if WRITE_LOCK_RETRY < (retry_count += 1) end raise WriteError, 'Failed to obtain write lock' unless uuid -- GitLab From f819bb7270979cb47a3f5ca97826c9491c983e4d Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Thu, 26 Apr 2018 16:30:27 +0900 Subject: [PATCH 64/86] Optimize Trace#write/append/raw by caching data and avoiding unnecesary truncate --- app/models/ci/build_trace_chunk.rb | 62 +++++++++++++++++------------- lib/gitlab/ci/trace/chunked_io.rb | 2 +- lib/gitlab/ci/trace/stream.rb | 1 - 3 files changed, 36 insertions(+), 29 deletions(-) diff --git a/app/models/ci/build_trace_chunk.rb b/app/models/ci/build_trace_chunk.rb index 794db48e1dce..9e7ebf41ee8f 100644 --- a/app/models/ci/build_trace_chunk.rb +++ b/app/models/ci/build_trace_chunk.rb @@ -21,36 +21,14 @@ class BuildTraceChunk < ActiveRecord::Base db: 2 } + ## + # Data is memoized for optimizing #size and #end_offset def data - if redis? - redis_data - elsif db? - raw_data - else - raise 'Unsupported data store' - end&.force_encoding(Encoding::BINARY) # Redis/Database return UTF-8 string as default - end - - def set_data(value) - raise ArgumentError, 'too much data' if value.bytesize > CHUNK_SIZE - - in_lock do - if redis? - redis_set_data(value) - elsif db? - self.raw_data = value - else - raise 'Unsupported data store' - end - - save! if changed? - end - - schedule_to_db if fullfilled? 
+ @data ||= get_data end def truncate(offset = 0) - self.append("", offset) + self.append("", offset) if offset < size end def append(new_data, offset) @@ -58,7 +36,7 @@ def append(new_data, offset) raise ArgumentError, 'Offset is out of bound' if offset > current_data.bytesize || offset < 0 raise ArgumentError, 'Outside of chunk size' if CHUNK_SIZE < offset + new_data.bytesize - self.set_data(current_data.byteslice(0, offset) + new_data) + set_data(current_data.byteslice(0, offset) + new_data) end def size @@ -89,6 +67,36 @@ def use_database! private + def get_data + if redis? + redis_data + elsif db? + raw_data + else + raise 'Unsupported data store' + end&.force_encoding(Encoding::BINARY) # Redis/Database return UTF-8 string as default + end + + def set_data(value) + raise ArgumentError, 'too much data' if value.bytesize > CHUNK_SIZE + + in_lock do + if redis? + redis_set_data(value) + elsif db? + self.raw_data = value + else + raise 'Unsupported data store' + end + + @data = value + + save! if changed? + end + + schedule_to_db if fullfilled? + end + def schedule_to_db return if db? diff --git a/lib/gitlab/ci/trace/chunked_io.rb b/lib/gitlab/ci/trace/chunked_io.rb index 877be08b219e..b45f2fa87c55 100644 --- a/lib/gitlab/ci/trace/chunked_io.rb +++ b/lib/gitlab/ci/trace/chunked_io.rb @@ -143,7 +143,7 @@ def truncate(offset) trace_chunks.where('chunk_index > ?', chunk_index).destroy_all # truncate current chunk - current_chunk.truncate(chunk_offset) if chunk_offset != 0 + current_chunk.truncate(chunk_offset) ensure invalidate_chunk_cache end diff --git a/lib/gitlab/ci/trace/stream.rb b/lib/gitlab/ci/trace/stream.rb index a71040e5e564..89e36ecbb15b 100644 --- a/lib/gitlab/ci/trace/stream.rb +++ b/lib/gitlab/ci/trace/stream.rb @@ -52,7 +52,6 @@ def set(data) stream.seek(0, IO::SEEK_SET) stream.write(data) - stream.truncate(data.bytesize) stream.flush() end -- GitLab From b4e239dfae1159027c9182fe2369b32321d59e51 Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Fri, 27 Apr 2018 23:25:21 +0900 Subject: [PATCH 65/86] Fix schema version --- db/schema.rb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/db/schema.rb b/db/schema.rb index 06232f688945..e2fe269079ce 100644 --- a/db/schema.rb +++ b/db/schema.rb @@ -11,7 +11,7 @@ # # It's strongly recommended that you check this file into your version control system. -ActiveRecord::Schema.define(version: 20180425205249) do +ActiveRecord::Schema.define(version: 20180425131009) do # These are extensions that must be enabled in order to support this database enable_extension "plpgsql" -- GitLab From 270b5867306c9f38cfa73402a4b00bc8fd982719 Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Fri, 27 Apr 2018 23:32:35 +0900 Subject: [PATCH 66/86] Fix static analysis for build_trace_chunk_spec.rb:8 --- spec/models/ci/build_trace_chunk_spec.rb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/spec/models/ci/build_trace_chunk_spec.rb b/spec/models/ci/build_trace_chunk_spec.rb index 988e23146d24..81e58eca3ed3 100644 --- a/spec/models/ci/build_trace_chunk_spec.rb +++ b/spec/models/ci/build_trace_chunk_spec.rb @@ -5,7 +5,7 @@ let(:chunk_index) { 0 } let(:data_store) { :redis } let(:raw_data) { nil } - + let(:build_trace_chunk) do described_class.new(build: build, chunk_index: chunk_index, data_store: data_store, raw_data: raw_data) end -- GitLab From 23c8e198463d566d2e8d2351c315741903035a64 Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Mon, 30 Apr 2018 14:52:29 +0900 Subject: [PATCH 67/86] Fix spec. 
Revert #truncate in stream (But still prevent redundant calls) --- app/models/ci/build_trace_chunk.rb | 9 ++-- lib/gitlab/ci/trace/stream.rb | 1 + spec/factories/ci/build_trace_chunks.rb | 2 +- spec/models/ci/build_trace_chunk_spec.rb | 43 +++++++++----------- spec/services/ci/retry_build_service_spec.rb | 2 +- 5 files changed, 27 insertions(+), 30 deletions(-) diff --git a/app/models/ci/build_trace_chunk.rb b/app/models/ci/build_trace_chunk.rb index 9e7ebf41ee8f..a8f43fd549ff 100644 --- a/app/models/ci/build_trace_chunk.rb +++ b/app/models/ci/build_trace_chunk.rb @@ -24,7 +24,7 @@ class BuildTraceChunk < ActiveRecord::Base ## # Data is memoized for optimizing #size and #end_offset def data - @data ||= get_data + @data ||= get_data.to_s end def truncate(offset = 0) @@ -32,11 +32,10 @@ def truncate(offset = 0) end def append(new_data, offset) - current_data = self.data.to_s - raise ArgumentError, 'Offset is out of bound' if offset > current_data.bytesize || offset < 0 - raise ArgumentError, 'Outside of chunk size' if CHUNK_SIZE < offset + new_data.bytesize + raise ArgumentError, 'Offset is out of range' if offset > data.bytesize || offset < 0 + raise ArgumentError, 'Chunk size overflow' if CHUNK_SIZE < (offset + new_data.bytesize) - self.set_data(current_data.byteslice(0, offset) + new_data) + set_data(data.byteslice(0, offset) + new_data) end def size diff --git a/lib/gitlab/ci/trace/stream.rb b/lib/gitlab/ci/trace/stream.rb index 89e36ecbb15b..a71040e5e564 100644 --- a/lib/gitlab/ci/trace/stream.rb +++ b/lib/gitlab/ci/trace/stream.rb @@ -52,6 +52,7 @@ def set(data) stream.seek(0, IO::SEEK_SET) stream.write(data) + stream.truncate(data.bytesize) stream.flush() end diff --git a/spec/factories/ci/build_trace_chunks.rb b/spec/factories/ci/build_trace_chunks.rb index be13a84a47c0..c0b9a25bfe8e 100644 --- a/spec/factories/ci/build_trace_chunks.rb +++ b/spec/factories/ci/build_trace_chunks.rb @@ -1,6 +1,6 @@ FactoryBot.define do factory :ci_build_trace_chunk, class: Ci::BuildTraceChunk do - job factory: :ci_build + build factory: :ci_build chunk_index 0 data_store :redis end diff --git a/spec/models/ci/build_trace_chunk_spec.rb b/spec/models/ci/build_trace_chunk_spec.rb index 81e58eca3ed3..a122ee84b3c5 100644 --- a/spec/models/ci/build_trace_chunk_spec.rb +++ b/spec/models/ci/build_trace_chunk_spec.rb @@ -46,7 +46,7 @@ end describe '#set_data' do - subject { build_trace_chunk.set_data(value) } + subject { build_trace_chunk.send(:set_data, value) } let(:value) { 'Sample data' } @@ -131,13 +131,17 @@ context 'when offset is negative' do let(:offset) { -1 } - it { expect { subject }.to raise_error('Offset is out of bound') } + it { expect { subject }.to raise_error('Offset is out of range') } end context 'when offset is bigger than data size' do let(:offset) { data.bytesize + 1 } - it { expect { subject }.to raise_error('Offset is out of bound') } + it do + expect_any_instance_of(described_class).not_to receive(:append) { } + + subject + end end context 'when offset is 10' do @@ -182,19 +186,19 @@ context 'when offset is negative' do let(:offset) { -1 } - it { expect { subject }.to raise_error('Offset is out of bound') } + it { expect { subject }.to raise_error('Offset is out of range') } end context 'when offset is bigger than data size' do let(:offset) { data.bytesize + 1 } - it { expect { subject }.to raise_error('Offset is out of bound') } + it { expect { subject }.to raise_error('Offset is out of range') } end context 'when offset is bigger than data size' do let(:new_data) { 'a' * 
(described_class::CHUNK_SIZE + 1) } - it { expect { subject }.to raise_error('Outside of chunk size') } + it { expect { subject }.to raise_error('Chunk size overflow') } end context 'when offset is EOF' do @@ -320,7 +324,7 @@ describe 'ExclusiveLock' do before do allow_any_instance_of(Gitlab::ExclusiveLease).to receive(:try_obtain) { nil } - stub_const('Ci::BuildTraceChunk::LOCK_RETRY', 1) + stub_const('Ci::BuildTraceChunk::WRITE_LOCK_RETRY', 1) end it 'raise an error' do @@ -333,30 +337,31 @@ before do pipeline = create(:ci_pipeline, project: project) - create(:ci_build, :running, :trace_live, pipeline: pipeline, project: project) - create(:ci_build, :running, :trace_live, pipeline: pipeline, project: project) - create(:ci_build, :running, :trace_live, pipeline: pipeline, project: project) + @build_ids = [] + @build_ids << create(:ci_build, :running, :trace_live, pipeline: pipeline, project: project).id + @build_ids << create(:ci_build, :running, :trace_live, pipeline: pipeline, project: project).id + @build_ids << create(:ci_build, :running, :trace_live, pipeline: pipeline, project: project).id end shared_examples_for 'deletes all build_trace_chunk and data in redis' do it do - project.builds.each do |build| + @build_ids.each do |build_id| Gitlab::Redis::SharedState.with do |redis| - redis.scan_each(match: "gitlab:ci:trace:#{build.id}:chunks:?") do |key| + redis.scan_each(match: "gitlab:ci:trace:#{build_id}:chunks:?") do |key| expect(redis.exists(key)).to be_truthy end end end - expect(described_class.count).not_to eq(0) + expect(described_class.count).to eq(3) subject expect(described_class.count).to eq(0) - project.builds.each do |build| + @build_ids.each do |build_id| Gitlab::Redis::SharedState.with do |redis| - redis.scan_each(match: "gitlab:ci:trace:#{build.id}:chunks:?") do |key| + redis.scan_each(match: "gitlab:ci:trace:#{build_id}:chunks:?") do |key| expect(redis.exists(key)).to be_falsey end end @@ -364,14 +369,6 @@ end end - context 'when build_trace_chunk is destroyed' do - let(:subject) do - project.builds.each { |build| build.chunks.destroy_all } - end - - it_behaves_like 'deletes all build_trace_chunk and data in redis' - end - context 'when build is destroyed' do let(:subject) do project.builds.destroy_all diff --git a/spec/services/ci/retry_build_service_spec.rb b/spec/services/ci/retry_build_service_spec.rb index 955894444eb4..cc200f6fc0a8 100644 --- a/spec/services/ci/retry_build_service_spec.rb +++ b/spec/services/ci/retry_build_service_spec.rb @@ -30,7 +30,7 @@ runner_id tag_taggings taggings tags trigger_request_id user_id auto_canceled_by_id retried failure_reason artifacts_file_store artifacts_metadata_store - metadata chunks].freeze + metadata trace_chunks].freeze shared_examples 'build duplication' do let(:another_pipeline) { create(:ci_empty_pipeline, project: project) } -- GitLab From d5344617a8b57e4d6d15f22ad3d09d5af82100fe Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Mon, 30 Apr 2018 15:27:05 +0900 Subject: [PATCH 68/86] Fix spec when parent record is destroyed --- app/models/ci/build_trace_chunk.rb | 2 +- spec/models/ci/build_trace_chunk_spec.rb | 25 ++++++++---------------- 2 files changed, 9 insertions(+), 18 deletions(-) diff --git a/app/models/ci/build_trace_chunk.rb b/app/models/ci/build_trace_chunk.rb index a8f43fd549ff..b9b84104b33f 100644 --- a/app/models/ci/build_trace_chunk.rb +++ b/app/models/ci/build_trace_chunk.rb @@ -125,7 +125,7 @@ def redis_delete_data end def redis_data_key - "gitlab:ci:trace:#{build_id}:chunks:#{chunk_index}:data" + 
"gitlab:ci:trace:#{build_id}:chunks:#{chunk_index}" end def redis_lock_key diff --git a/spec/models/ci/build_trace_chunk_spec.rb b/spec/models/ci/build_trace_chunk_spec.rb index a122ee84b3c5..cab5db9ca06b 100644 --- a/spec/models/ci/build_trace_chunk_spec.rb +++ b/spec/models/ci/build_trace_chunk_spec.rb @@ -332,25 +332,20 @@ end end - describe 'deletes data in redis after chunk record destroyed' do + describe 'deletes data in redis after a parent record destroyed' do let(:project) { create(:project) } before do pipeline = create(:ci_pipeline, project: project) - @build_ids = [] - @build_ids << create(:ci_build, :running, :trace_live, pipeline: pipeline, project: project).id - @build_ids << create(:ci_build, :running, :trace_live, pipeline: pipeline, project: project).id - @build_ids << create(:ci_build, :running, :trace_live, pipeline: pipeline, project: project).id + create(:ci_build, :running, :trace_live, pipeline: pipeline, project: project) + create(:ci_build, :running, :trace_live, pipeline: pipeline, project: project) + create(:ci_build, :running, :trace_live, pipeline: pipeline, project: project) end shared_examples_for 'deletes all build_trace_chunk and data in redis' do it do - @build_ids.each do |build_id| - Gitlab::Redis::SharedState.with do |redis| - redis.scan_each(match: "gitlab:ci:trace:#{build_id}:chunks:?") do |key| - expect(redis.exists(key)).to be_truthy - end - end + Gitlab::Redis::SharedState.with do |redis| + expect(redis.scan_each(match: "gitlab:ci:trace:?:chunks:?").to_a.count).to eq(3) end expect(described_class.count).to eq(3) @@ -359,12 +354,8 @@ expect(described_class.count).to eq(0) - @build_ids.each do |build_id| - Gitlab::Redis::SharedState.with do |redis| - redis.scan_each(match: "gitlab:ci:trace:#{build_id}:chunks:?") do |key| - expect(redis.exists(key)).to be_falsey - end - end + Gitlab::Redis::SharedState.with do |redis| + expect(redis.scan_each(match: "gitlab:ci:trace:?:chunks:?").to_a.count).to eq(0) end end end -- GitLab From 6ed91266d021c2058697d12a0123bec9b00548ad Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Wed, 2 May 2018 14:27:28 +0900 Subject: [PATCH 69/86] Skip truncate when offset == size. Fix static analysys. 
--- lib/gitlab/ci/trace/chunked_io.rb | 1 + spec/models/ci/build_trace_chunk_spec.rb | 10 ++++++---- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/lib/gitlab/ci/trace/chunked_io.rb b/lib/gitlab/ci/trace/chunked_io.rb index b45f2fa87c55..f03c7071b46a 100644 --- a/lib/gitlab/ci/trace/chunked_io.rb +++ b/lib/gitlab/ci/trace/chunked_io.rb @@ -135,6 +135,7 @@ def write(data) end def truncate(offset) raise ArgumentError, 'Outside of file' if offset > size + return if offset == size # Skip the following process as it doesn't affect anything @tell = offset @size = offset diff --git a/spec/models/ci/build_trace_chunk_spec.rb b/spec/models/ci/build_trace_chunk_spec.rb index cab5db9ca06b..b0ede29669ea 100644 --- a/spec/models/ci/build_trace_chunk_spec.rb +++ b/spec/models/ci/build_trace_chunk_spec.rb @@ -345,7 +345,7 @@ shared_examples_for 'deletes all build_trace_chunk and data in redis' do it do Gitlab::Redis::SharedState.with do |redis| - expect(redis.scan_each(match: "gitlab:ci:trace:?:chunks:?").to_a.count).to eq(3) + expect(redis.scan_each(match: "gitlab:ci:trace:?:chunks:?").to_a.size).to eq(3) end expect(described_class.count).to eq(3) @@ -355,14 +355,16 @@ expect(described_class.count).to eq(0) Gitlab::Redis::SharedState.with do |redis| - expect(redis.scan_each(match: "gitlab:ci:trace:?:chunks:?").to_a.count).to eq(0) + expect(redis.scan_each(match: "gitlab:ci:trace:?:chunks:?").to_a.size).to eq(0) end end end - context 'when build is destroyed' do + context 'when traces are archived' do let(:subject) do - project.builds.destroy_all + project.builds.each do |build| + build.success! + end end it_behaves_like 'deletes all build_trace_chunk and data in redis' -- GitLab From 4b34c875f7166d8bddf57952c3ed46b1291bdf77 Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Wed, 2 May 2018 14:46:04 +0900 Subject: [PATCH 70/86] Add guard clause for offset < 0 in #truncate --- lib/gitlab/ci/trace/chunked_io.rb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/gitlab/ci/trace/chunked_io.rb b/lib/gitlab/ci/trace/chunked_io.rb index f03c7071b46a..cd3d13644110 100644 --- a/lib/gitlab/ci/trace/chunked_io.rb +++ b/lib/gitlab/ci/trace/chunked_io.rb @@ -134,7 +134,7 @@ def write(data) end def truncate(offset) - raise ArgumentError, 'Outside of file' if offset > size + raise ArgumentError, 'Outside of file' if offset > size || offset < 0 return if offset == size # Skip the following process as it doesn't affect anything @tell = offset -- GitLab From abe87373ab209c144cf684b57b12262a8df60540 Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Wed, 2 May 2018 16:20:56 +0900 Subject: [PATCH 71/86] Enable feature flag on build_trace_chunk_spec --- spec/models/ci/build_trace_chunk_spec.rb | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/spec/models/ci/build_trace_chunk_spec.rb b/spec/models/ci/build_trace_chunk_spec.rb index b0ede29669ea..99fbba4afa98 100644 --- a/spec/models/ci/build_trace_chunk_spec.rb +++ b/spec/models/ci/build_trace_chunk_spec.rb @@ -10,6 +10,10 @@ described_class.new(build: build, chunk_index: chunk_index, data_store: data_store, raw_data: raw_data) end + before do + stub_feature_flags(ci_enable_live_trace: true) + end + describe 'CHUNK_SIZE' do -- GitLab From 950df8babf07d7135b1f7cf95586d40f0ca36ff6 Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Wed, 2 May 2018 16:52:54 +0900 Subject: [PATCH 72/86] Rename BuildTraceSwapChunkWorker to 
BuildTraceChunkFlushToDBWorker --- app/models/ci/build_trace_chunk.rb | 2 +- app/workers/all_queues.yml | 2 +- ...hunk_worker.rb => build_trace_chunk_flush_to_db_worker.rb} | 2 +- spec/models/ci/build_trace_chunk_spec.rb | 4 ++-- 4 files changed, 5 insertions(+), 5 deletions(-) rename app/workers/{build_trace_swap_chunk_worker.rb => build_trace_chunk_flush_to_db_worker.rb} (87%) diff --git a/app/models/ci/build_trace_chunk.rb b/app/models/ci/build_trace_chunk.rb index b9b84104b33f..4c1449cbb706 100644 --- a/app/models/ci/build_trace_chunk.rb +++ b/app/models/ci/build_trace_chunk.rb @@ -99,7 +99,7 @@ def set_data(value) def schedule_to_db return if db? - BuildTraceSwapChunkWorker.perform_async(id) + BuildTraceChunkFlushToDBWorker.perform_async(id) end def fullfilled? diff --git a/app/workers/all_queues.yml b/app/workers/all_queues.yml index 44388e7ca0f6..0d30971dfd20 100644 --- a/app/workers/all_queues.yml +++ b/app/workers/all_queues.yml @@ -66,7 +66,7 @@ - pipeline_processing:pipeline_update - pipeline_processing:stage_update - pipeline_processing:update_head_pipeline_for_merge_request -- pipeline_processing:build_trace_swap_chunk +- pipeline_processing:build_trace_chunk_flush_to_db - repository_check:repository_check_clear - repository_check:repository_check_single_repository diff --git a/app/workers/build_trace_swap_chunk_worker.rb b/app/workers/build_trace_chunk_flush_to_db_worker.rb similarity index 87% rename from app/workers/build_trace_swap_chunk_worker.rb rename to app/workers/build_trace_chunk_flush_to_db_worker.rb index 1392b9d34e1e..447dfc4d2294 100644 --- a/app/workers/build_trace_swap_chunk_worker.rb +++ b/app/workers/build_trace_chunk_flush_to_db_worker.rb @@ -1,4 +1,4 @@ -class BuildTraceSwapChunkWorker +class BuildTraceChunkFlushToDBWorker include ApplicationWorker include PipelineQueue diff --git a/spec/models/ci/build_trace_chunk_spec.rb b/spec/models/ci/build_trace_chunk_spec.rb index 99fbba4afa98..4a58971a4417 100644 --- a/spec/models/ci/build_trace_chunk_spec.rb +++ b/spec/models/ci/build_trace_chunk_spec.rb @@ -75,7 +75,7 @@ let(:value) { 'a' * described_class::CHUNK_SIZE } it 'schedules stashing data' do - expect(BuildTraceSwapChunkWorker).to receive(:perform_async).once + expect(BuildTraceChunkFlushToDBWorker).to receive(:perform_async).once subject end @@ -112,7 +112,7 @@ context 'when fullfilled chunk size' do it 'does not schedule stashing data' do - expect(BuildTraceSwapChunkWorker).not_to receive(:perform_async) + expect(BuildTraceChunkFlushToDBWorker).not_to receive(:perform_async) subject end -- GitLab From 7d626c41ccb4531dceeb1d1025c067bac3c63c4d Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Wed, 2 May 2018 21:22:08 +0900 Subject: [PATCH 73/86] Fix BuildTraceChunkFlushToDbWorker name --- app/models/ci/build_trace_chunk.rb | 2 +- app/workers/build_trace_chunk_flush_to_db_worker.rb | 2 +- spec/models/ci/build_trace_chunk_spec.rb | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/app/models/ci/build_trace_chunk.rb b/app/models/ci/build_trace_chunk.rb index 4c1449cbb706..ee999a3395dc 100644 --- a/app/models/ci/build_trace_chunk.rb +++ b/app/models/ci/build_trace_chunk.rb @@ -99,7 +99,7 @@ def set_data(value) def schedule_to_db return if db? - BuildTraceChunkFlushToDBWorker.perform_async(id) + BuildTraceChunkFlushToDbWorker.perform_async(id) end def fullfilled? 
diff --git a/app/workers/build_trace_chunk_flush_to_db_worker.rb b/app/workers/build_trace_chunk_flush_to_db_worker.rb index 447dfc4d2294..495118f7c147 100644 --- a/app/workers/build_trace_chunk_flush_to_db_worker.rb +++ b/app/workers/build_trace_chunk_flush_to_db_worker.rb @@ -1,4 +1,4 @@ -class BuildTraceChunkFlushToDBWorker +class BuildTraceChunkFlushToDbWorker include ApplicationWorker include PipelineQueue diff --git a/spec/models/ci/build_trace_chunk_spec.rb b/spec/models/ci/build_trace_chunk_spec.rb index 4a58971a4417..dbeee74857c3 100644 --- a/spec/models/ci/build_trace_chunk_spec.rb +++ b/spec/models/ci/build_trace_chunk_spec.rb @@ -75,7 +75,7 @@ let(:value) { 'a' * described_class::CHUNK_SIZE } it 'schedules stashing data' do - expect(BuildTraceChunkFlushToDBWorker).to receive(:perform_async).once + expect(BuildTraceChunkFlushToDbWorker).to receive(:perform_async).once subject end @@ -112,7 +112,7 @@ context 'when fullfilled chunk size' do it 'does not schedule stashing data' do - expect(BuildTraceChunkFlushToDBWorker).not_to receive(:perform_async) + expect(BuildTraceChunkFlushToDbWorker).not_to receive(:perform_async) subject end -- GitLab From 9ad20891435f1f51ae6e8e951b84131480d1ab97 Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Wed, 2 May 2018 22:14:55 +0900 Subject: [PATCH 74/86] Fix Redis scan_each wildcard expression in spec --- spec/models/ci/build_trace_chunk_spec.rb | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/spec/models/ci/build_trace_chunk_spec.rb b/spec/models/ci/build_trace_chunk_spec.rb index dbeee74857c3..991f501f6339 100644 --- a/spec/models/ci/build_trace_chunk_spec.rb +++ b/spec/models/ci/build_trace_chunk_spec.rb @@ -349,7 +349,7 @@ shared_examples_for 'deletes all build_trace_chunk and data in redis' do it do Gitlab::Redis::SharedState.with do |redis| - expect(redis.scan_each(match: "gitlab:ci:trace:?:chunks:?").to_a.size).to eq(3) + expect(redis.scan_each(match: "gitlab:ci:trace:*:chunks:*").to_a.size).to eq(3) end expect(described_class.count).to eq(3) @@ -359,7 +359,7 @@ expect(described_class.count).to eq(0) Gitlab::Redis::SharedState.with do |redis| - expect(redis.scan_each(match: "gitlab:ci:trace:?:chunks:?").to_a.size).to eq(0) + expect(redis.scan_each(match: "gitlab:ci:trace:*:chunks:*").to_a.size).to eq(0) end end end -- GitLab From 812dd06d512ab7774b375ce45aa9235aafc99911 Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Fri, 4 May 2018 17:02:08 +0900 Subject: [PATCH 75/86] Introduce Redis helpers. Rename BuildTraceChunkFlushToDbWorker to Ci::BuildTraceChunkFlushWorker. 
--- app/models/ci/build_trace_chunk.rb | 2 +- app/workers/all_queues.yml | 2 +- .../build_trace_chunk_flush_to_db_worker.rb | 12 ------------ .../ci/build_trace_chunk_flush_worker.rb | 13 +++++++++++++ spec/models/ci/build_trace_chunk_spec.rb | 4 ++-- spec/requests/api/runner_spec.rb | 6 +++--- spec/spec_helper.rb | 19 +++++++++++++------ spec/support/chunked_io/chunked_io_helpers.rb | 5 ----- spec/support/redis/redis_helpers.rb | 18 ++++++++++++++++++ 9 files changed, 51 insertions(+), 30 deletions(-) delete mode 100644 app/workers/build_trace_chunk_flush_to_db_worker.rb create mode 100644 app/workers/ci/build_trace_chunk_flush_worker.rb create mode 100644 spec/support/redis/redis_helpers.rb diff --git a/app/models/ci/build_trace_chunk.rb b/app/models/ci/build_trace_chunk.rb index ee999a3395dc..870b4ae20336 100644 --- a/app/models/ci/build_trace_chunk.rb +++ b/app/models/ci/build_trace_chunk.rb @@ -99,7 +99,7 @@ def set_data(value) def schedule_to_db return if db? - BuildTraceChunkFlushToDbWorker.perform_async(id) + Ci::BuildTraceChunkFlushWorker.perform_async(id) end def fullfilled? diff --git a/app/workers/all_queues.yml b/app/workers/all_queues.yml index 0d30971dfd20..dc628bb7f50c 100644 --- a/app/workers/all_queues.yml +++ b/app/workers/all_queues.yml @@ -66,7 +66,7 @@ - pipeline_processing:pipeline_update - pipeline_processing:stage_update - pipeline_processing:update_head_pipeline_for_merge_request -- pipeline_processing:build_trace_chunk_flush_to_db +- pipeline_processing:ci_build_trace_chunk_flush - repository_check:repository_check_clear - repository_check:repository_check_single_repository diff --git a/app/workers/build_trace_chunk_flush_to_db_worker.rb b/app/workers/build_trace_chunk_flush_to_db_worker.rb deleted file mode 100644 index 495118f7c147..000000000000 --- a/app/workers/build_trace_chunk_flush_to_db_worker.rb +++ /dev/null @@ -1,12 +0,0 @@ -class BuildTraceChunkFlushToDbWorker - include ApplicationWorker - include PipelineQueue - - queue_namespace :pipeline_processing - - def perform(build_trace_chunk_id) - Ci::BuildTraceChunk.find_by(id: build_trace_chunk_id).try do |build_trace_chunk| - build_trace_chunk.use_database! - end - end -end diff --git a/app/workers/ci/build_trace_chunk_flush_worker.rb b/app/workers/ci/build_trace_chunk_flush_worker.rb new file mode 100644 index 000000000000..b8f8be29c7bd --- /dev/null +++ b/app/workers/ci/build_trace_chunk_flush_worker.rb @@ -0,0 +1,13 @@ +module Ci + class BuildTraceChunkFlushWorker + include ApplicationWorker + + queue_namespace :pipeline_processing + + def perform(build_trace_chunk_id) + ::Ci::BuildTraceChunk.find_by(id: build_trace_chunk_id).try do |build_trace_chunk| + build_trace_chunk.use_database! 
+ end + end + end +end diff --git a/spec/models/ci/build_trace_chunk_spec.rb b/spec/models/ci/build_trace_chunk_spec.rb index 991f501f6339..118b72da11c0 100644 --- a/spec/models/ci/build_trace_chunk_spec.rb +++ b/spec/models/ci/build_trace_chunk_spec.rb @@ -75,7 +75,7 @@ let(:value) { 'a' * described_class::CHUNK_SIZE } it 'schedules stashing data' do - expect(BuildTraceChunkFlushToDbWorker).to receive(:perform_async).once + expect(Ci::BuildTraceChunkFlushWorker).to receive(:perform_async).once subject end @@ -112,7 +112,7 @@ context 'when fullfilled chunk size' do it 'does not schedule stashing data' do - expect(BuildTraceChunkFlushToDbWorker).not_to receive(:perform_async) + expect(Ci::BuildTraceChunkFlushWorker).not_to receive(:perform_async) subject end diff --git a/spec/requests/api/runner_spec.rb b/spec/requests/api/runner_spec.rb index f27c95b4907f..ff23b6252447 100644 --- a/spec/requests/api/runner_spec.rb +++ b/spec/requests/api/runner_spec.rb @@ -2,7 +2,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do include StubGitlabCalls - include ChunkedIOHelpers + include RedisHelpers let(:registration_token) { 'abcdefg123456' } @@ -873,8 +873,8 @@ def update_job(token = job.token, **params) patch_the_trace expect(job.reload.trace.raw).to eq 'BUILD TRACE appended appended' - # GitLab-Rails enxounters an outage on Redis - redis_shared_state_outage! + # GitLab-Rails encounters an outage on Redis + redis_shared_state_cleanup! expect(job.reload.trace.raw).to eq '' # GitLab-Runner patchs diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb index cc61cd7d838d..b4fc596a751f 100644 --- a/spec/spec_helper.rb +++ b/spec/spec_helper.rb @@ -86,6 +86,7 @@ config.include WaitForRequests, :js config.include LiveDebugger, :js config.include MigrationsHelpers, :migration + config.include RedisHelpers if ENV['CI'] # This includes the first try, i.e. tests will be run 4 times before failing. @@ -146,21 +147,27 @@ end config.around(:each, :clean_gitlab_redis_cache) do |example| - Gitlab::Redis::Cache.with(&:flushall) + redis_cache_cleanup! example.run - Gitlab::Redis::Cache.with(&:flushall) + redis_cache_cleanup! end config.around(:each, :clean_gitlab_redis_shared_state) do |example| - Gitlab::Redis::SharedState.with(&:flushall) - Sidekiq.redis(&:flushall) + redis_shared_state_cleanup! example.run - Gitlab::Redis::SharedState.with(&:flushall) - Sidekiq.redis(&:flushall) + redis_shared_state_cleanup! + end + + config.around(:each, :clean_gitlab_redis_queues) do |example| + redis_queues_cleanup! + + example.run + + redis_queues_cleanup! end # The :each scope runs "inside" the example, so this hook ensures the DB is in the diff --git a/spec/support/chunked_io/chunked_io_helpers.rb b/spec/support/chunked_io/chunked_io_helpers.rb index 4238a4b3e947..fec1f9515632 100644 --- a/spec/support/chunked_io/chunked_io_helpers.rb +++ b/spec/support/chunked_io/chunked_io_helpers.rb @@ -8,9 +8,4 @@ def stub_buffer_size(size) stub_const('Ci::BuildTraceChunk::CHUNK_SIZE', size) stub_const('Gitlab::Ci::Trace::ChunkedIO::CHUNK_SIZE', size) end - - def redis_shared_state_outage! - Gitlab::Redis::SharedState.with(&:flushall) - Sidekiq.redis(&:flushall) - end end diff --git a/spec/support/redis/redis_helpers.rb b/spec/support/redis/redis_helpers.rb new file mode 100644 index 000000000000..0457e8487d8c --- /dev/null +++ b/spec/support/redis/redis_helpers.rb @@ -0,0 +1,18 @@ +module RedisHelpers + # config/README.md + + # Usage: performance enhancement + def redis_cache_cleanup! 
+ Gitlab::Redis::Cache.with(&:flushall) + end + + # Usage: SideKiq, Mailroom, CI Runner, Workhorse, push services + def redis_queues_cleanup! + Gitlab::Redis::Queues.with(&:flushall) + end + + # Usage: session state, rate limiting + def redis_shared_state_cleanup! + Gitlab::Redis::SharedState.with(&:flushall) + end +end -- GitLab From 28284c14973a59d5a7f0f8d2862da7f61b101640 Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Fri, 4 May 2018 17:42:37 +0900 Subject: [PATCH 76/86] Add validation and skip logic at #truncate --- app/models/ci/build_trace_chunk.rb | 7 +++++-- spec/models/ci/build_trace_chunk_spec.rb | 6 +----- 2 files changed, 6 insertions(+), 7 deletions(-) diff --git a/app/models/ci/build_trace_chunk.rb b/app/models/ci/build_trace_chunk.rb index 870b4ae20336..08a4465821c7 100644 --- a/app/models/ci/build_trace_chunk.rb +++ b/app/models/ci/build_trace_chunk.rb @@ -28,11 +28,14 @@ def data end def truncate(offset = 0) - self.append("", offset) if offset < size + raise ArgumentError, 'Offset is out of range' if offset > size || offset < 0 + return if offset == size # Skip the following process as it doesn't affect anything + + self.append("", offset) end def append(new_data, offset) - raise ArgumentError, 'Offset is out of range' if offset > data.bytesize || offset < 0 + raise ArgumentError, 'Offset is out of range' if offset > size || offset < 0 raise ArgumentError, 'Chunk size overflow' if CHUNK_SIZE < (offset + new_data.bytesize) set_data(data.byteslice(0, offset) + new_data) diff --git a/spec/models/ci/build_trace_chunk_spec.rb b/spec/models/ci/build_trace_chunk_spec.rb index 118b72da11c0..46d09dff52cb 100644 --- a/spec/models/ci/build_trace_chunk_spec.rb +++ b/spec/models/ci/build_trace_chunk_spec.rb @@ -141,11 +141,7 @@ context 'when offset is bigger than data size' do let(:offset) { data.bytesize + 1 } - it do - expect_any_instance_of(described_class).not_to receive(:append) { } - - subject - end + it { expect { subject }.to raise_error('Offset is out of range') } end context 'when offset is 10' do -- GitLab From e1ad8ca6d090df2b1431a9ed58bb51ba92b695d9 Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Fri, 4 May 2018 19:49:26 +0900 Subject: [PATCH 77/86] Test trace_spec on both conditions - ci_enable_live_trace is on/off --- spec/lib/gitlab/ci/trace_spec.rb | 666 +--------------- .../ci_trace_shared_exmaples.rb | 741 ++++++++++++++++++ 2 files changed, 747 insertions(+), 660 deletions(-) create mode 100644 spec/support/shared_examples/ci_trace_shared_exmaples.rb diff --git a/spec/lib/gitlab/ci/trace_spec.rb b/spec/lib/gitlab/ci/trace_spec.rb index faa23461a915..e9d755c2021a 100644 --- a/spec/lib/gitlab/ci/trace_spec.rb +++ b/spec/lib/gitlab/ci/trace_spec.rb @@ -4,678 +4,24 @@ let(:build) { create(:ci_build) } let(:trace) { described_class.new(build) } - before do - stub_feature_flags(ci_enable_live_trace: true) - end - describe "associations" do it { expect(trace).to respond_to(:job) } it { expect(trace).to delegate_method(:old_trace).to(:job) } end - describe '#html' do - before do - trace.set("12\n34") - end - - it "returns formatted html" do - expect(trace.html).to eq("12
34") - end - - it "returns last line of formatted html" do - expect(trace.html(last_lines: 1)).to eq("34") - end - end - - describe '#raw' do - before do - trace.set("12\n34") - end - - it "returns raw output" do - expect(trace.raw).to eq("12\n34") - end - - it "returns last line of raw output" do - expect(trace.raw(last_lines: 1)).to eq("34") - end - end - - describe '#extract_coverage' do - let(:regex) { '\(\d+.\d+\%\) covered' } - - context 'matching coverage' do - before do - trace.set('Coverage 1033 / 1051 LOC (98.29%) covered') - end - - it "returns valid coverage" do - expect(trace.extract_coverage(regex)).to eq("98.29") - end - end - - context 'no coverage' do - before do - trace.set('No coverage') - end - - it 'returs nil' do - expect(trace.extract_coverage(regex)).to be_nil - end - end - end - - describe '#extract_sections' do - let(:log) { 'No sections' } - let(:sections) { trace.extract_sections } - - before do - trace.set(log) - end - - context 'no sections' do - it 'returs []' do - expect(trace.extract_sections).to eq([]) - end - end - - context 'multiple sections available' do - let(:log) { File.read(expand_fixture_path('trace/trace_with_sections')) } - let(:sections_data) do - [ - { name: 'prepare_script', lines: 2, duration: 3.seconds }, - { name: 'get_sources', lines: 4, duration: 1.second }, - { name: 'restore_cache', lines: 0, duration: 0.seconds }, - { name: 'download_artifacts', lines: 0, duration: 0.seconds }, - { name: 'build_script', lines: 2, duration: 1.second }, - { name: 'after_script', lines: 0, duration: 0.seconds }, - { name: 'archive_cache', lines: 0, duration: 0.seconds }, - { name: 'upload_artifacts', lines: 0, duration: 0.seconds } - ] - end - - it "returns valid sections" do - expect(sections).not_to be_empty - expect(sections.size).to eq(sections_data.size), - "expected #{sections_data.size} sections, got #{sections.size}" - - buff = StringIO.new(log) - sections.each_with_index do |s, i| - expected = sections_data[i] - - expect(s[:name]).to eq(expected[:name]) - expect(s[:date_end] - s[:date_start]).to eq(expected[:duration]) - - buff.seek(s[:byte_start], IO::SEEK_SET) - length = s[:byte_end] - s[:byte_start] - lines = buff.read(length).count("\n") - expect(lines).to eq(expected[:lines]) - end - end - end - - context 'logs contains "section_start"' do - let(:log) { "section_start:1506417476:a_section\r\033[0Klooks like a section_start:invalid\nsection_end:1506417477:a_section\r\033[0K"} - - it "returns only one section" do - expect(sections).not_to be_empty - expect(sections.size).to eq(1) - - section = sections[0] - expect(section[:name]).to eq('a_section') - expect(section[:byte_start]).not_to eq(section[:byte_end]), "got an empty section" - end - end - - context 'missing section_end' do - let(:log) { "section_start:1506417476:a_section\r\033[0KSome logs\nNo section_end\n"} - - it "returns no sections" do - expect(sections).to be_empty - end - end - - context 'missing section_start' do - let(:log) { "Some logs\nNo section_start\nsection_end:1506417476:a_section\r\033[0K"} - - it "returns no sections" do - expect(sections).to be_empty - end - end - - context 'inverted section_start section_end' do - let(:log) { "section_end:1506417476:a_section\r\033[0Klooks like a section_start:invalid\nsection_start:1506417477:a_section\r\033[0K"} - - it "returns no sections" do - expect(sections).to be_empty - end - end - end - - describe '#set' do - before do - trace.set("12") - end - - it "returns trace" do - expect(trace.raw).to eq("12") - end - - context 
'overwrite trace' do - before do - trace.set("34") - end - - it "returns new trace" do - expect(trace.raw).to eq("34") - end - end - - context 'runners token' do - let(:token) { 'my_secret_token' } - - before do - build.project.update(runners_token: token) - trace.set(token) - end - - it "hides token" do - expect(trace.raw).not_to include(token) - end - end - - context 'hides build token' do - let(:token) { 'my_secret_token' } - - before do - build.update(token: token) - trace.set(token) - end - - it "hides token" do - expect(trace.raw).not_to include(token) - end - end - end - - describe '#append' do + context 'when live trace feature is disabled' do before do - trace.set("1234") - end - - it "returns correct trace" do - expect(trace.append("56", 4)).to eq(6) - expect(trace.raw).to eq("123456") - end - - context 'tries to append trace at different offset' do - it "fails with append" do - expect(trace.append("56", 2)).to eq(-4) - expect(trace.raw).to eq("1234") - end - end - - context 'runners token' do - let(:token) { 'my_secret_token' } - - before do - build.project.update(runners_token: token) - trace.append(token, 0) - end - - it "hides token" do - expect(trace.raw).not_to include(token) - end - end - - context 'build token' do - let(:token) { 'my_secret_token' } - - before do - build.update(token: token) - trace.append(token, 0) - end - - it "hides token" do - expect(trace.raw).not_to include(token) - end - end - end - - describe '#read' do - shared_examples 'read successfully with IO' do - it 'yields with source' do - trace.read do |stream| - expect(stream).to be_a(Gitlab::Ci::Trace::Stream) - expect(stream.stream).to be_a(IO) - end - end - end - - shared_examples 'read successfully with StringIO' do - it 'yields with source' do - trace.read do |stream| - expect(stream).to be_a(Gitlab::Ci::Trace::Stream) - expect(stream.stream).to be_a(StringIO) - end - end - end - - shared_examples 'read successfully with ChunkedIO' do - it 'yields with source' do - trace.read do |stream| - expect(stream).to be_a(Gitlab::Ci::Trace::Stream) - expect(stream.stream).to be_a(Gitlab::Ci::Trace::ChunkedIO) - end - end - end - - shared_examples 'failed to read' do - it 'yields without source' do - trace.read do |stream| - expect(stream).to be_a(Gitlab::Ci::Trace::Stream) - expect(stream.stream).to be_nil - end - end - end - - context 'when trace artifact exists' do - before do - create(:ci_job_artifact, :trace, job: build) - end - - it_behaves_like 'read successfully with IO' - end - - context 'when current_path (with project_id) exists' do - before do - expect(trace).to receive(:default_path) { expand_fixture_path('trace/sample_trace') } - end - - it_behaves_like 'read successfully with IO' - end - - context 'when current_path (with project_ci_id) exists' do - before do - expect(trace).to receive(:deprecated_path) { expand_fixture_path('trace/sample_trace') } - end - - it_behaves_like 'read successfully with IO' - end - - context 'when db trace exists' do - before do - build.send(:write_attribute, :trace, "data") - end - - it_behaves_like 'read successfully with StringIO' - end - - context 'when live trace exists' do - before do - Gitlab::Ci::Trace::ChunkedIO.new(build) do |stream| - stream.write('abc') - end - end - - it_behaves_like 'read successfully with ChunkedIO' - end - - context 'when no sources exist' do - it_behaves_like 'failed to read' - end - end - - describe 'trace handling' do - subject { trace.exist? 
} - - context 'trace does not exist' do - it { expect(trace.exist?).to be(false) } - end - - context 'when trace artifact exists' do - before do - create(:ci_job_artifact, :trace, job: build) - end - - it { is_expected.to be_truthy } - - context 'when the trace artifact has been erased' do - before do - trace.erase! - end - - it { is_expected.to be_falsy } - - it 'removes associations' do - expect(Ci::JobArtifact.exists?(job_id: build.id, file_type: :trace)).to be_falsy - end - end - end - - context 'new trace path is used' do - before do - trace.send(:ensure_directory) - - File.open(trace.send(:default_path), "w") do |file| - file.write("data") - end - end - - it "trace exist" do - expect(trace.exist?).to be(true) - end - - it "can be erased" do - trace.erase! - expect(trace.exist?).to be(false) - end - end - - context 'deprecated path' do - let(:path) { trace.send(:deprecated_path) } - - context 'with valid ci_id' do - before do - build.project.update(ci_id: 1000) - - FileUtils.mkdir_p(File.dirname(path)) - - File.open(path, "w") do |file| - file.write("data") - end - end - - it "trace exist" do - expect(trace.exist?).to be(true) - end - - it "can be erased" do - trace.erase! - expect(trace.exist?).to be(false) - end - end - - context 'without valid ci_id' do - it "does not return deprecated path" do - expect(path).to be_nil - end - end - end - - context 'stored in database' do - before do - build.send(:write_attribute, :trace, "data") - end - - it "trace exist" do - expect(trace.exist?).to be(true) - end - - it "can be erased" do - trace.erase! - expect(trace.exist?).to be(false) - end - - it "returns database data" do - expect(trace.raw).to eq("data") - end + stub_feature_flags(ci_enable_live_trace: false) end - context 'stored in database' do - before do - Gitlab::Ci::Trace::ChunkedIO.new(build) do |stream| - stream.write('abc') - end - end - - it "trace exist" do - expect(trace.exist?).to be(true) - end - - it "can be erased" do - trace.erase! - expect(trace.exist?).to be(false) - expect(Ci::BuildTraceChunk.where(build: build)).not_to be_exist - end - - it "returns live trace data" do - expect(trace.raw).to eq("abc") - end - end + it_behaves_like 'trace with disabled live trace feature' end - describe '#archive!' do - subject { trace.archive! 
} - + context 'when live trace feature is enabled' do before do - stub_feature_flags(ci_enable_live_trace: false) + stub_feature_flags(ci_enable_live_trace: true) end - shared_examples 'archive trace file' do - it do - expect { subject }.to change { Ci::JobArtifact.count }.by(1) - - build.reload - expect(build.trace.exist?).to be_truthy - expect(build.job_artifacts_trace.file.exists?).to be_truthy - expect(build.job_artifacts_trace.file.filename).to eq('job.log') - expect(File.exist?(src_path)).to be_falsy - expect(src_checksum) - .to eq(Digest::SHA256.file(build.job_artifacts_trace.file.path).hexdigest) - expect(build.job_artifacts_trace.file_sha256).to eq(src_checksum) - end - end - - shared_examples 'source trace file stays intact' do |error:| - it do - expect { subject }.to raise_error(error) - - build.reload - expect(build.trace.exist?).to be_truthy - expect(build.job_artifacts_trace).to be_nil - expect(File.exist?(src_path)).to be_truthy - end - end - - shared_examples 'archive trace in database' do - it do - expect { subject }.to change { Ci::JobArtifact.count }.by(1) - - build.reload - expect(build.trace.exist?).to be_truthy - expect(build.job_artifacts_trace.file.exists?).to be_truthy - expect(build.job_artifacts_trace.file.filename).to eq('job.log') - expect(build.old_trace).to be_nil - expect(src_checksum) - .to eq(Digest::SHA256.file(build.job_artifacts_trace.file.path).hexdigest) - expect(build.job_artifacts_trace.file_sha256).to eq(src_checksum) - end - end - - shared_examples 'source trace in database stays intact' do |error:| - it do - expect { subject }.to raise_error(error) - - build.reload - expect(build.trace.exist?).to be_truthy - expect(build.job_artifacts_trace).to be_nil - expect(build.old_trace).to eq(trace_content) - end - end - - shared_examples 'archive trace file in ChunkedIO' do - it do - expect { subject }.to change { Ci::JobArtifact.count }.by(1) - - build.reload - expect(build.trace.exist?).to be_truthy - expect(build.job_artifacts_trace.file.exists?).to be_truthy - expect(build.job_artifacts_trace.file.filename).to eq('job.log') - expect(Ci::BuildTraceChunk.where(build: build)).not_to be_exist - expect(src_checksum) - .to eq(Digest::SHA256.file(build.job_artifacts_trace.file.path).hexdigest) - expect(build.job_artifacts_trace.file_sha256).to eq(src_checksum) - end - end - - shared_examples 'source trace in ChunkedIO stays intact' do |error:| - it do - expect { subject }.to raise_error(error) - - build.reload - expect(build.trace.exist?).to be_truthy - expect(build.job_artifacts_trace).to be_nil - Gitlab::Ci::Trace::ChunkedIO.new(build) do |stream| - expect(stream.read).to eq(trace_raw) - end - end - end - - context 'when job does not have trace artifact' do - context 'when trace file stored in default path' do - let(:build) { create(:ci_build, :success, :trace_live) } - let(:src_path) { trace.read { |s| s.path } } - let(:src_checksum) { Digest::SHA256.file(src_path).hexdigest } - - before do - stub_feature_flags(ci_enable_live_trace: false) - build # Initialize after set feature flag - src_path - src_checksum - end - - it_behaves_like 'archive trace file' - - context 'when failed to create clone file' do - before do - allow(IO).to receive(:copy_stream).and_return(0) - end - - it_behaves_like 'source trace file stays intact', error: Gitlab::Ci::Trace::ArchiveError - end - - context 'when failed to create job artifact record' do - before do - allow_any_instance_of(Ci::JobArtifact).to receive(:save).and_return(false) - 
allow_any_instance_of(Ci::JobArtifact).to receive_message_chain(:errors, :full_messages) - .and_return(%w[Error Error]) - end - - it_behaves_like 'source trace file stays intact', error: ActiveRecord::RecordInvalid - end - end - - context 'when trace is stored in database' do - let(:build) { create(:ci_build, :success) } - let(:trace_content) { 'Sample trace' } - let(:src_checksum) { Digest::SHA256.hexdigest(trace_content) } - - before do - stub_feature_flags(ci_enable_live_trace: false) - build # Initialize after set feature flag - trace_content - src_checksum - build.update_column(:trace, trace_content) - end - - it_behaves_like 'archive trace in database' - - context 'when failed to create clone file' do - before do - allow(IO).to receive(:copy_stream).and_return(0) - end - - it_behaves_like 'source trace in database stays intact', error: Gitlab::Ci::Trace::ArchiveError - end - - context 'when failed to create job artifact record' do - before do - allow_any_instance_of(Ci::JobArtifact).to receive(:save).and_return(false) - allow_any_instance_of(Ci::JobArtifact).to receive_message_chain(:errors, :full_messages) - .and_return(%w[Error Error]) - end - - it_behaves_like 'source trace in database stays intact', error: ActiveRecord::RecordInvalid - end - - context 'when there is a validation error on Ci::Build' do - before do - allow_any_instance_of(Ci::Build).to receive(:save).and_return(false) - allow_any_instance_of(Ci::Build).to receive_message_chain(:errors, :full_messages) - .and_return(%w[Error Error]) - end - - context "when erase old trace with 'save'" do - before do - build.send(:write_attribute, :trace, nil) - build.save - end - - it 'old trace is not deleted' do - build.reload - expect(build.trace.raw).to eq(trace_content) - end - end - - it_behaves_like 'archive trace in database' - end - end - - context 'when trace is stored in ChunkedIO' do - let(:build) { create(:ci_build, :success, :trace_live) } - let(:trace_raw) { build.trace.raw } - let(:src_checksum) { Digest::SHA256.hexdigest(trace_raw) } - - before do - stub_feature_flags(ci_enable_live_trace: true) - build # Initialize after set feature flag - trace_raw - src_checksum - end - - it_behaves_like 'archive trace file in ChunkedIO' - - context 'when failed to create clone file' do - before do - allow(IO).to receive(:copy_stream).and_return(0) - end - - it_behaves_like 'source trace in ChunkedIO stays intact', error: Gitlab::Ci::Trace::ArchiveError - end - - context 'when failed to create job artifact record' do - before do - allow_any_instance_of(Ci::JobArtifact).to receive(:save).and_return(false) - allow_any_instance_of(Ci::JobArtifact).to receive_message_chain(:errors, :full_messages) - .and_return(%w[Error Error]) - end - - it_behaves_like 'source trace in ChunkedIO stays intact', error: ActiveRecord::RecordInvalid - end - end - end - - context 'when job has trace artifact' do - before do - create(:ci_job_artifact, :trace, job: build) - end - - it 'does not archive' do - expect_any_instance_of(described_class).not_to receive(:archive_stream!) - expect { subject }.to raise_error('Already archived') - expect(build.job_artifacts_trace.file.exists?).to be_truthy - end - end - - context 'when job is not finished yet' do - let!(:build) { create(:ci_build, :running, :trace_live) } - - it 'does not archive' do - expect_any_instance_of(described_class).not_to receive(:archive_stream!) 
- expect { subject }.to raise_error('Job is not finished yet') - expect(build.trace.exist?).to be_truthy - end - end + it_behaves_like 'trace with enabled live trace feature' end end diff --git a/spec/support/shared_examples/ci_trace_shared_exmaples.rb b/spec/support/shared_examples/ci_trace_shared_exmaples.rb new file mode 100644 index 000000000000..5640451dad3c --- /dev/null +++ b/spec/support/shared_examples/ci_trace_shared_exmaples.rb @@ -0,0 +1,741 @@ +shared_examples_for 'common trace features' do + describe '#html' do + before do + trace.set("12\n34") + end + + it "returns formatted html" do + expect(trace.html).to eq("12
34") + end + + it "returns last line of formatted html" do + expect(trace.html(last_lines: 1)).to eq("34") + end + end + + describe '#raw' do + before do + trace.set("12\n34") + end + + it "returns raw output" do + expect(trace.raw).to eq("12\n34") + end + + it "returns last line of raw output" do + expect(trace.raw(last_lines: 1)).to eq("34") + end + end + + describe '#extract_coverage' do + let(:regex) { '\(\d+.\d+\%\) covered' } + + context 'matching coverage' do + before do + trace.set('Coverage 1033 / 1051 LOC (98.29%) covered') + end + + it "returns valid coverage" do + expect(trace.extract_coverage(regex)).to eq("98.29") + end + end + + context 'no coverage' do + before do + trace.set('No coverage') + end + + it 'returs nil' do + expect(trace.extract_coverage(regex)).to be_nil + end + end + end + + describe '#extract_sections' do + let(:log) { 'No sections' } + let(:sections) { trace.extract_sections } + + before do + trace.set(log) + end + + context 'no sections' do + it 'returs []' do + expect(trace.extract_sections).to eq([]) + end + end + + context 'multiple sections available' do + let(:log) { File.read(expand_fixture_path('trace/trace_with_sections')) } + let(:sections_data) do + [ + { name: 'prepare_script', lines: 2, duration: 3.seconds }, + { name: 'get_sources', lines: 4, duration: 1.second }, + { name: 'restore_cache', lines: 0, duration: 0.seconds }, + { name: 'download_artifacts', lines: 0, duration: 0.seconds }, + { name: 'build_script', lines: 2, duration: 1.second }, + { name: 'after_script', lines: 0, duration: 0.seconds }, + { name: 'archive_cache', lines: 0, duration: 0.seconds }, + { name: 'upload_artifacts', lines: 0, duration: 0.seconds } + ] + end + + it "returns valid sections" do + expect(sections).not_to be_empty + expect(sections.size).to eq(sections_data.size), + "expected #{sections_data.size} sections, got #{sections.size}" + + buff = StringIO.new(log) + sections.each_with_index do |s, i| + expected = sections_data[i] + + expect(s[:name]).to eq(expected[:name]) + expect(s[:date_end] - s[:date_start]).to eq(expected[:duration]) + + buff.seek(s[:byte_start], IO::SEEK_SET) + length = s[:byte_end] - s[:byte_start] + lines = buff.read(length).count("\n") + expect(lines).to eq(expected[:lines]) + end + end + end + + context 'logs contains "section_start"' do + let(:log) { "section_start:1506417476:a_section\r\033[0Klooks like a section_start:invalid\nsection_end:1506417477:a_section\r\033[0K"} + + it "returns only one section" do + expect(sections).not_to be_empty + expect(sections.size).to eq(1) + + section = sections[0] + expect(section[:name]).to eq('a_section') + expect(section[:byte_start]).not_to eq(section[:byte_end]), "got an empty section" + end + end + + context 'missing section_end' do + let(:log) { "section_start:1506417476:a_section\r\033[0KSome logs\nNo section_end\n"} + + it "returns no sections" do + expect(sections).to be_empty + end + end + + context 'missing section_start' do + let(:log) { "Some logs\nNo section_start\nsection_end:1506417476:a_section\r\033[0K"} + + it "returns no sections" do + expect(sections).to be_empty + end + end + + context 'inverted section_start section_end' do + let(:log) { "section_end:1506417476:a_section\r\033[0Klooks like a section_start:invalid\nsection_start:1506417477:a_section\r\033[0K"} + + it "returns no sections" do + expect(sections).to be_empty + end + end + end + + describe '#set' do + before do + trace.set("12") + end + + it "returns trace" do + expect(trace.raw).to eq("12") + end + + context 
'overwrite trace' do + before do + trace.set("34") + end + + it "returns new trace" do + expect(trace.raw).to eq("34") + end + end + + context 'runners token' do + let(:token) { 'my_secret_token' } + + before do + build.project.update(runners_token: token) + trace.set(token) + end + + it "hides token" do + expect(trace.raw).not_to include(token) + end + end + + context 'hides build token' do + let(:token) { 'my_secret_token' } + + before do + build.update(token: token) + trace.set(token) + end + + it "hides token" do + expect(trace.raw).not_to include(token) + end + end + end + + describe '#append' do + before do + trace.set("1234") + end + + it "returns correct trace" do + expect(trace.append("56", 4)).to eq(6) + expect(trace.raw).to eq("123456") + end + + context 'tries to append trace at different offset' do + it "fails with append" do + expect(trace.append("56", 2)).to eq(-4) + expect(trace.raw).to eq("1234") + end + end + + context 'runners token' do + let(:token) { 'my_secret_token' } + + before do + build.project.update(runners_token: token) + trace.append(token, 0) + end + + it "hides token" do + expect(trace.raw).not_to include(token) + end + end + + context 'build token' do + let(:token) { 'my_secret_token' } + + before do + build.update(token: token) + trace.append(token, 0) + end + + it "hides token" do + expect(trace.raw).not_to include(token) + end + end + end +end + +shared_examples_for 'trace with disabled live trace feature' do + it_behaves_like 'common trace features' + + describe '#read' do + shared_examples 'read successfully with IO' do + it 'yields with source' do + trace.read do |stream| + expect(stream).to be_a(Gitlab::Ci::Trace::Stream) + expect(stream.stream).to be_a(IO) + end + end + end + + shared_examples 'read successfully with StringIO' do + it 'yields with source' do + trace.read do |stream| + expect(stream).to be_a(Gitlab::Ci::Trace::Stream) + expect(stream.stream).to be_a(StringIO) + end + end + end + + shared_examples 'failed to read' do + it 'yields without source' do + trace.read do |stream| + expect(stream).to be_a(Gitlab::Ci::Trace::Stream) + expect(stream.stream).to be_nil + end + end + end + + context 'when trace artifact exists' do + before do + create(:ci_job_artifact, :trace, job: build) + end + + it_behaves_like 'read successfully with IO' + end + + context 'when current_path (with project_id) exists' do + before do + expect(trace).to receive(:default_path) { expand_fixture_path('trace/sample_trace') } + end + + it_behaves_like 'read successfully with IO' + end + + context 'when current_path (with project_ci_id) exists' do + before do + expect(trace).to receive(:deprecated_path) { expand_fixture_path('trace/sample_trace') } + end + + it_behaves_like 'read successfully with IO' + end + + context 'when db trace exists' do + before do + build.send(:write_attribute, :trace, "data") + end + + it_behaves_like 'read successfully with StringIO' + end + + context 'when no sources exist' do + it_behaves_like 'failed to read' + end + end + + describe 'trace handling' do + subject { trace.exist? } + + context 'trace does not exist' do + it { expect(trace.exist?).to be(false) } + end + + context 'when trace artifact exists' do + before do + create(:ci_job_artifact, :trace, job: build) + end + + it { is_expected.to be_truthy } + + context 'when the trace artifact has been erased' do + before do + trace.erase! 
+ end + + it { is_expected.to be_falsy } + + it 'removes associations' do + expect(Ci::JobArtifact.exists?(job_id: build.id, file_type: :trace)).to be_falsy + end + end + end + + context 'new trace path is used' do + before do + trace.send(:ensure_directory) + + File.open(trace.send(:default_path), "w") do |file| + file.write("data") + end + end + + it "trace exist" do + expect(trace.exist?).to be(true) + end + + it "can be erased" do + trace.erase! + expect(trace.exist?).to be(false) + end + end + + context 'deprecated path' do + let(:path) { trace.send(:deprecated_path) } + + context 'with valid ci_id' do + before do + build.project.update(ci_id: 1000) + + FileUtils.mkdir_p(File.dirname(path)) + + File.open(path, "w") do |file| + file.write("data") + end + end + + it "trace exist" do + expect(trace.exist?).to be(true) + end + + it "can be erased" do + trace.erase! + expect(trace.exist?).to be(false) + end + end + + context 'without valid ci_id' do + it "does not return deprecated path" do + expect(path).to be_nil + end + end + end + + context 'stored in database' do + before do + build.send(:write_attribute, :trace, "data") + end + + it "trace exist" do + expect(trace.exist?).to be(true) + end + + it "can be erased" do + trace.erase! + expect(trace.exist?).to be(false) + end + + it "returns database data" do + expect(trace.raw).to eq("data") + end + end + end + + describe '#archive!' do + subject { trace.archive! } + + shared_examples 'archive trace file' do + it do + expect { subject }.to change { Ci::JobArtifact.count }.by(1) + + build.reload + expect(build.trace.exist?).to be_truthy + expect(build.job_artifacts_trace.file.exists?).to be_truthy + expect(build.job_artifacts_trace.file.filename).to eq('job.log') + expect(File.exist?(src_path)).to be_falsy + expect(src_checksum) + .to eq(Digest::SHA256.file(build.job_artifacts_trace.file.path).hexdigest) + expect(build.job_artifacts_trace.file_sha256).to eq(src_checksum) + end + end + + shared_examples 'source trace file stays intact' do |error:| + it do + expect { subject }.to raise_error(error) + + build.reload + expect(build.trace.exist?).to be_truthy + expect(build.job_artifacts_trace).to be_nil + expect(File.exist?(src_path)).to be_truthy + end + end + + shared_examples 'archive trace in database' do + it do + expect { subject }.to change { Ci::JobArtifact.count }.by(1) + + build.reload + expect(build.trace.exist?).to be_truthy + expect(build.job_artifacts_trace.file.exists?).to be_truthy + expect(build.job_artifacts_trace.file.filename).to eq('job.log') + expect(build.old_trace).to be_nil + expect(src_checksum) + .to eq(Digest::SHA256.file(build.job_artifacts_trace.file.path).hexdigest) + expect(build.job_artifacts_trace.file_sha256).to eq(src_checksum) + end + end + + shared_examples 'source trace in database stays intact' do |error:| + it do + expect { subject }.to raise_error(error) + + build.reload + expect(build.trace.exist?).to be_truthy + expect(build.job_artifacts_trace).to be_nil + expect(build.old_trace).to eq(trace_content) + end + end + + context 'when job does not have trace artifact' do + context 'when trace file stored in default path' do + let!(:build) { create(:ci_build, :success, :trace_live) } + let!(:src_path) { trace.read { |s| s.path } } + let!(:src_checksum) { Digest::SHA256.file(src_path).hexdigest } + + it_behaves_like 'archive trace file' + + context 'when failed to create clone file' do + before do + allow(IO).to receive(:copy_stream).and_return(0) + end + + it_behaves_like 'source trace file stays 
intact', error: Gitlab::Ci::Trace::ArchiveError + end + + context 'when failed to create job artifact record' do + before do + allow_any_instance_of(Ci::JobArtifact).to receive(:save).and_return(false) + allow_any_instance_of(Ci::JobArtifact).to receive_message_chain(:errors, :full_messages) + .and_return(%w[Error Error]) + end + + it_behaves_like 'source trace file stays intact', error: ActiveRecord::RecordInvalid + end + end + + context 'when trace is stored in database' do + let(:build) { create(:ci_build, :success) } + let(:trace_content) { 'Sample trace' } + let(:src_checksum) { Digest::SHA256.hexdigest(trace_content) } + + before do + build.update_column(:trace, trace_content) + end + + it_behaves_like 'archive trace in database' + + context 'when failed to create clone file' do + before do + allow(IO).to receive(:copy_stream).and_return(0) + end + + it_behaves_like 'source trace in database stays intact', error: Gitlab::Ci::Trace::ArchiveError + end + + context 'when failed to create job artifact record' do + before do + allow_any_instance_of(Ci::JobArtifact).to receive(:save).and_return(false) + allow_any_instance_of(Ci::JobArtifact).to receive_message_chain(:errors, :full_messages) + .and_return(%w[Error Error]) + end + + it_behaves_like 'source trace in database stays intact', error: ActiveRecord::RecordInvalid + end + + context 'when there is a validation error on Ci::Build' do + before do + allow_any_instance_of(Ci::Build).to receive(:save).and_return(false) + allow_any_instance_of(Ci::Build).to receive_message_chain(:errors, :full_messages) + .and_return(%w[Error Error]) + end + + context "when erase old trace with 'save'" do + before do + build.send(:write_attribute, :trace, nil) + build.save + end + + it 'old trace is not deleted' do + build.reload + expect(build.trace.raw).to eq(trace_content) + end + end + + it_behaves_like 'archive trace in database' + end + end + end + + context 'when job has trace artifact' do + before do + create(:ci_job_artifact, :trace, job: build) + end + + it 'does not archive' do + expect_any_instance_of(described_class).not_to receive(:archive_stream!) + expect { subject }.to raise_error('Already archived') + expect(build.job_artifacts_trace.file.exists?).to be_truthy + end + end + + context 'when job is not finished yet' do + let!(:build) { create(:ci_build, :running, :trace_live) } + + it 'does not archive' do + expect_any_instance_of(described_class).not_to receive(:archive_stream!) 
+ expect { subject }.to raise_error('Job is not finished yet') + expect(build.trace.exist?).to be_truthy + end + end + end +end + +shared_examples_for 'trace with enabled live trace feature' do + it_behaves_like 'common trace features' + + describe '#read' do + shared_examples 'read successfully with IO' do + it 'yields with source' do + trace.read do |stream| + expect(stream).to be_a(Gitlab::Ci::Trace::Stream) + expect(stream.stream).to be_a(IO) + end + end + end + + shared_examples 'read successfully with ChunkedIO' do + it 'yields with source' do + trace.read do |stream| + expect(stream).to be_a(Gitlab::Ci::Trace::Stream) + expect(stream.stream).to be_a(Gitlab::Ci::Trace::ChunkedIO) + end + end + end + + shared_examples 'failed to read' do + it 'yields without source' do + trace.read do |stream| + expect(stream).to be_a(Gitlab::Ci::Trace::Stream) + expect(stream.stream).to be_nil + end + end + end + + context 'when trace artifact exists' do + before do + create(:ci_job_artifact, :trace, job: build) + end + + it_behaves_like 'read successfully with IO' + end + + context 'when live trace exists' do + before do + Gitlab::Ci::Trace::ChunkedIO.new(build) do |stream| + stream.write('abc') + end + end + + it_behaves_like 'read successfully with ChunkedIO' + end + + context 'when no sources exist' do + it_behaves_like 'failed to read' + end + end + + describe 'trace handling' do + subject { trace.exist? } + + context 'trace does not exist' do + it { expect(trace.exist?).to be(false) } + end + + context 'when trace artifact exists' do + before do + create(:ci_job_artifact, :trace, job: build) + end + + it { is_expected.to be_truthy } + + context 'when the trace artifact has been erased' do + before do + trace.erase! + end + + it { is_expected.to be_falsy } + + it 'removes associations' do + expect(Ci::JobArtifact.exists?(job_id: build.id, file_type: :trace)).to be_falsy + end + end + end + + context 'stored in live trace' do + before do + Gitlab::Ci::Trace::ChunkedIO.new(build) do |stream| + stream.write('abc') + end + end + + it "trace exist" do + expect(trace.exist?).to be(true) + end + + it "can be erased" do + trace.erase! + expect(trace.exist?).to be(false) + expect(Ci::BuildTraceChunk.where(build: build)).not_to be_exist + end + + it "returns live trace data" do + expect(trace.raw).to eq("abc") + end + end + end + + describe '#archive!' do + subject { trace.archive! 
} + + shared_examples 'archive trace file in ChunkedIO' do + it do + expect { subject }.to change { Ci::JobArtifact.count }.by(1) + + build.reload + expect(build.trace.exist?).to be_truthy + expect(build.job_artifacts_trace.file.exists?).to be_truthy + expect(build.job_artifacts_trace.file.filename).to eq('job.log') + expect(Ci::BuildTraceChunk.where(build: build)).not_to be_exist + expect(src_checksum) + .to eq(Digest::SHA256.file(build.job_artifacts_trace.file.path).hexdigest) + expect(build.job_artifacts_trace.file_sha256).to eq(src_checksum) + end + end + + shared_examples 'source trace in ChunkedIO stays intact' do |error:| + it do + expect { subject }.to raise_error(error) + + build.reload + expect(build.trace.exist?).to be_truthy + expect(build.job_artifacts_trace).to be_nil + Gitlab::Ci::Trace::ChunkedIO.new(build) do |stream| + expect(stream.read).to eq(trace_raw) + end + end + end + + context 'when job does not have trace artifact' do + context 'when trace is stored in ChunkedIO' do + let!(:build) { create(:ci_build, :success, :trace_live) } + let!(:trace_raw) { build.trace.raw } + let!(:src_checksum) { Digest::SHA256.hexdigest(trace_raw) } + + it_behaves_like 'archive trace file in ChunkedIO' + + context 'when failed to create clone file' do + before do + allow(IO).to receive(:copy_stream).and_return(0) + end + + it_behaves_like 'source trace in ChunkedIO stays intact', error: Gitlab::Ci::Trace::ArchiveError + end + + context 'when failed to create job artifact record' do + before do + allow_any_instance_of(Ci::JobArtifact).to receive(:save).and_return(false) + allow_any_instance_of(Ci::JobArtifact).to receive_message_chain(:errors, :full_messages) + .and_return(%w[Error Error]) + end + + it_behaves_like 'source trace in ChunkedIO stays intact', error: ActiveRecord::RecordInvalid + end + end + end + + context 'when job has trace artifact' do + before do + create(:ci_job_artifact, :trace, job: build) + end + + it 'does not archive' do + expect_any_instance_of(described_class).not_to receive(:archive_stream!) + expect { subject }.to raise_error('Already archived') + expect(build.job_artifacts_trace.file.exists?).to be_truthy + end + end + + context 'when job is not finished yet' do + let!(:build) { create(:ci_build, :running, :trace_live) } + + it 'does not archive' do + expect_any_instance_of(described_class).not_to receive(:archive_stream!) + expect { subject }.to raise_error('Job is not finished yet') + expect(build.trace.exist?).to be_truthy + end + end + end +end -- GitLab From 8b47980e12887e255146ce155b446d9176586cfe Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Fri, 4 May 2018 20:02:14 +0900 Subject: [PATCH 78/86] Clean up "when redis had an outage" context in runner spec --- spec/requests/api/runner_spec.rb | 52 +++++++++++++++++++++++--------- 1 file changed, 37 insertions(+), 15 deletions(-) diff --git a/spec/requests/api/runner_spec.rb b/spec/requests/api/runner_spec.rb index ff23b6252447..592256a3acf7 100644 --- a/spec/requests/api/runner_spec.rb +++ b/spec/requests/api/runner_spec.rb @@ -867,26 +867,48 @@ def update_job(token = job.token, **params) end end - context 'when redis had an outage' do - it "recovers" do - # GitLab-Runner patchs + context 'when trace is patched' do + before do patch_the_trace + end + + it 'has valid trace' do + expect(response.status).to eq(202) expect(job.reload.trace.raw).to eq 'BUILD TRACE appended appended' + end + + context 'when redis data are flushed' do + before do + redis_shared_state_cleanup! 
+ end + + it 'has empty trace' do + expect(job.reload.trace.raw).to eq '' + end - # GitLab-Rails encounters an outage on Redis - redis_shared_state_cleanup! - expect(job.reload.trace.raw).to eq '' + context 'when we perform partial patch' do + before do + patch_the_trace('hello', headers.merge({ 'Content-Range' => "28-32" })) + end - # GitLab-Runner patchs - patch_the_trace('hello', headers.merge({ 'Content-Range' => "28-32" })) - expect(response.status).to eq 202 - expect(response.header).to have_key 'Range' - expect(response.header['Range']).to eq '0-0' - expect(job.reload.trace.raw).to eq '' + it 'returns an error' do + expect(response.status).to eq(202) + expect(response.header).to have_key 'Range' + expect(response.header['Range']).to eq '0-0' + expect(job.reload.trace.raw).to eq '' + end + end - # GitLab-Runner re-patchs - patch_the_trace('BUILD TRACE appended appended hello') - expect(job.reload.trace.raw).to eq 'BUILD TRACE appended appended hello' + context 'when we resend full trace' do + before do + patch_the_trace('BUILD TRACE appended appended hello') + end + + it 'succeeds with updating trace' do + expect(response.status).to eq(202) + expect(job.reload.trace.raw).to eq 'BUILD TRACE appended appended hello' + end + end end end end -- GitLab From c1d3b48c96ce44a2ff3e84cb89063a00c67297f5 Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Fri, 4 May 2018 21:48:10 +0900 Subject: [PATCH 79/86] Change BuildTraceChunkFlushWorker's queue from pipeline_processing to pipeline_background --- app/workers/all_queues.yml | 2 +- app/workers/ci/build_trace_chunk_flush_worker.rb | 3 +-- spec/requests/api/runner_spec.rb | 2 +- 3 files changed, 3 insertions(+), 4 deletions(-) diff --git a/app/workers/all_queues.yml b/app/workers/all_queues.yml index dc628bb7f50c..5d9ec6142d78 100644 --- a/app/workers/all_queues.yml +++ b/app/workers/all_queues.yml @@ -52,6 +52,7 @@ - pipeline_creation:create_pipeline - pipeline_creation:run_pipeline_schedule - pipeline_background:archive_trace +- pipeline_background:ci_build_trace_chunk_flush - pipeline_default:build_coverage - pipeline_default:build_trace_sections - pipeline_default:pipeline_metrics @@ -66,7 +67,6 @@ - pipeline_processing:pipeline_update - pipeline_processing:stage_update - pipeline_processing:update_head_pipeline_for_merge_request -- pipeline_processing:ci_build_trace_chunk_flush - repository_check:repository_check_clear - repository_check:repository_check_single_repository diff --git a/app/workers/ci/build_trace_chunk_flush_worker.rb b/app/workers/ci/build_trace_chunk_flush_worker.rb index b8f8be29c7bd..218d6688bd9e 100644 --- a/app/workers/ci/build_trace_chunk_flush_worker.rb +++ b/app/workers/ci/build_trace_chunk_flush_worker.rb @@ -1,8 +1,7 @@ module Ci class BuildTraceChunkFlushWorker include ApplicationWorker - - queue_namespace :pipeline_processing + include PipelineBackgroundQueue def perform(build_trace_chunk_id) ::Ci::BuildTraceChunk.find_by(id: build_trace_chunk_id).try do |build_trace_chunk| diff --git a/spec/requests/api/runner_spec.rb b/spec/requests/api/runner_spec.rb index 592256a3acf7..70c3529da03d 100644 --- a/spec/requests/api/runner_spec.rb +++ b/spec/requests/api/runner_spec.rb @@ -901,7 +901,7 @@ def update_job(token = job.token, **params) context 'when we resend full trace' do before do - patch_the_trace('BUILD TRACE appended appended hello') + patch_the_trace('BUILD TRACE appended appended hello', headers.merge({ 'Content-Range' => "0-32" })) end it 'succeeds with updating trace' do -- GitLab From 
8d8534d7ab31ef9f7bdc4e00c54bcda9d9bf93d9 Mon Sep 17 00:00:00 2001
From: Kamil Trzciński
Date: Sun, 6 May 2018 19:54:41 +0200
Subject: [PATCH 80/86] Enforce proper 416 support for runner trace patch
 endpoint

---
 lib/api/runner.rb                               | 17 ++++++++++++++---
 lib/gitlab/ci/trace.rb                          |  2 +-
 spec/requests/api/runner_spec.rb                | 14 ++++++--------
 ..._exmaples.rb => ci_trace_shared_examples.rb} |  0
 4 files changed, 21 insertions(+), 12 deletions(-)
 rename spec/support/shared_examples/{ci_trace_shared_exmaples.rb => ci_trace_shared_examples.rb} (100%)

diff --git a/lib/api/runner.rb b/lib/api/runner.rb
index 67896ae1fc55..cd7d6603171b 100644
--- a/lib/api/runner.rb
+++ b/lib/api/runner.rb
@@ -153,9 +153,20 @@ class Runner < Grape::API
          content_range = request.headers['Content-Range']
          content_range = content_range.split('-')

-          stream_size = job.trace.append(request.body.read, content_range[0].to_i)
-          if stream_size < 0
-            break error!('416 Range Not Satisfiable', 416, { 'Range' => "0-#{-stream_size}" })
+          # TODO:
+          # it seems that `Content-Range` as formatted by the runner is wrong:
+          # `byte_end` should point to the final byte, but it points to byte+1.
+          # That means we have to calculate the end of the body ourselves,
+          # as we cannot use `content_range[1]`.
+          # Issue: https://gitlab.com/gitlab-org/gitlab-runner/issues/3275
+
+          body_data = request.body.read
+          body_start = content_range[0].to_i
+          body_end = body_start + body_data.bytesize
+
+          stream_size = job.trace.append(body_data, body_start)
+          unless stream_size == body_end
+            break error!('416 Range Not Satisfiable', 416, { 'Range' => "0-#{stream_size}" })
           end

           status 202
diff --git a/lib/gitlab/ci/trace.rb b/lib/gitlab/ci/trace.rb
index 65c8b9118c67..f46d1d39ea77 100644
--- a/lib/gitlab/ci/trace.rb
+++ b/lib/gitlab/ci/trace.rb
@@ -45,7 +45,7 @@ def set(data)
     def append(data, offset)
       write('a+b') do |stream|
         current_length = stream.size
-        break -current_length unless current_length == offset
+        break current_length unless current_length == offset

         data = job.hide_secrets(data)
         stream.append(data, offset)
diff --git a/spec/requests/api/runner_spec.rb b/spec/requests/api/runner_spec.rb
index f02c001f85d6..082605827b7d 100644
--- a/spec/requests/api/runner_spec.rb
+++ b/spec/requests/api/runner_spec.rb
@@ -906,20 +906,18 @@ def update_job(token = job.token, **params)

           context 'when we perform partial patch' do
             before do
-              patch_the_trace('hello', headers.merge({ 'Content-Range' => "28-32" }))
+              patch_the_trace('hello', headers.merge({ 'Content-Range' => "28-32/5" }))
             end

             it 'returns an error' do
-              expect(response.status).to eq(202)
-              expect(response.header).to have_key 'Range'
-              expect(response.header['Range']).to eq '0-0'
-              expect(job.reload.trace.raw).to eq ''
+              expect(response.status).to eq(416)
+              expect(response.header['Range']).to eq('0-0')
             end
           end

           context 'when we resend full trace' do
             before do
-              patch_the_trace('BUILD TRACE appended appended hello', headers.merge({ 'Content-Range' => "0-32" }))
+              patch_the_trace('BUILD TRACE appended appended hello', headers.merge({ 'Content-Range' => "0-34/35" }))
             end

             it 'succeeds with updating trace' do
@@ -945,7 +943,7 @@ def update_job(token = job.token, **params)
           end

           context 'when content-range start is too big' do
-            let(:headers_with_range) { headers.merge({ 'Content-Range' => '15-20' }) }
+            let(:headers_with_range) { headers.merge({ 'Content-Range' => '15-20/6' }) }

             it 'gets 416 error response with range headers' do
               expect(response.status).to eq 416
From b71320a29658ecb423b1f557656adba9f9b1c562 Mon Sep 17 00:00:00 2001
From: Shinya Maeda
Date: Mon, 7 May 2018 16:21:09 +0900
Subject: [PATCH 81/86] Add doc about this architecture, impact, roadmap, etc

---
 doc/administration/job_traces.md | 96 ++++++++++++++++++++++++++++++++
 1 file changed, 96 insertions(+)

diff --git a/doc/administration/job_traces.md b/doc/administration/job_traces.md
index 84a1ffeec98b..3470274e5ea5 100644
--- a/doc/administration/job_traces.md
+++ b/doc/administration/job_traces.md
@@ -40,3 +40,99 @@ To change the location where the job logs will be stored, follow the steps below
 [reconfigure gitlab]: restart_gitlab.md#omnibus-gitlab-reconfigure "How to reconfigure Omnibus GitLab"
 [restart gitlab]: restart_gitlab.md#installations-from-source "How to restart GitLab"
+
+## New live trace architecture
+
+> [Introduced][ce-18169] in GitLab 10.4.
+
+> **Notes**:
+- This feature is still Beta, which could impact GitLab.com/on-premises instances, and in the worst case scenario, traces will be lost.
+- This feature is still being discussed in [an issue](https://gitlab.com/gitlab-org/gitlab-ce/issues/46097) for performance improvements.
+- This feature is off by default. Please check below how to enable/disable this feature.
+
+**What is "live trace"?**
+
+It's job traces exists while job is being processed by Gitlab-Runner. You can see the progress in job pages(GUI).
+In contrast, all traces will be archived after job is finished, that's called "archived trace".
+
+**What is new architecture?**
+
+So far, when GitLab-Runner sends a job trace to GitLab-Rails, traces have been saved to File Storage as text files.
+This was a problem on [Cloud Native-compatible GitLab application](https://gitlab.com/gitlab-com/migration/issues/23) that
+GitLab-Rails had to rely on File Storage.
+
+This new live trace architecture stores traces to Redis and Database instead of File Storage.
+Redis is used as first-class trace storage, it stores each trace upto 128KB. Once the data is fulfileld, it's flushed to Database. Afterwhile, the data in Redis and Database will be archived to ObjectStorage.
+
+Here is the detailed data flow.
+
+1. GitLab-Runner picks a job from GitLab-Rails
+1. GitLab-Runner sends a piece of trace to GitLab-Rails
+1. GitLab-Rails appends the data to Redis
+1. If the data in Redis is fulfilled 128KB, the data is flushed to Database.
+1. Steps 2.-4. are repeated until the job is finished
+1. Once the job is finished, GitLab-Rails schedules a sidekiq worker to archive the trace
+1. The sidekiq worker archives the trace to Object Storage, and cleans up the trace in Redis and Database
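+
+The following is a hedged, self-contained sketch of steps 2.-4. in plain Ruby (a Hash stands in for Redis, an Array for the database chunk rows; all names are illustrative, not the real classes):
+
+```ruby
+CHUNK_SIZE = 128 * 1024 # 128kB, mirroring the chunk size described above
+
+redis_buffers = Hash.new { |hash, key| hash[key] = +'' } # stand-in for Redis
+database_chunks = []                                     # stand-in for the database
+
+def append_trace(redis_buffers, database_chunks, job_id, data)
+  buffer = redis_buffers["trace:#{job_id}"]
+  buffer << data
+
+  # Flush every full 128kB chunk to the database; the tail stays in Redis.
+  database_chunks << buffer.slice!(0, CHUNK_SIZE) while buffer.bytesize >= CHUNK_SIZE
+end
+
+append_trace(redis_buffers, database_chunks, 42, 'x' * 200_000)
+database_chunks.size               # => 1
+redis_buffers["trace:42"].bytesize # => 68928, waits in Redis for more data
+```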
+
+**How to check if it's on or off**
+
+```ruby
+Feature.enabled?('ci_enable_live_trace')
+```
+
+**How to enable**
+
+```ruby
+Feature.enable('ci_enable_live_trace')
+```
+
+>**Note:**
+The transition period will be handled gracefully. Upcoming traces will be generated with the new architecture, and on-going live traces will stay with the legacy architecture (i.e. on-going live traces won't be re-generated forcibly with the new architecture).
+
+**How to disable**
+
+```ruby
+Feature.disable('ci_enable_live_trace')
+```
+
+>**Note:**
+The transition period will be handled gracefully. Upcoming traces will be generated with the legacy architecture, and on-going live traces will stay with the new architecture (i.e. on-going live traces won't be re-generated forcibly with the legacy architecture).
+
+**Redis namespace**
+
+`Gitlab::Redis::SharedState`
+
+**Potential impact**
+
+- This feature could incur data loss
+  - Case 1: When all data in Redis is accidentally flushed.
+    - On-going live traces could be recovered by re-sending traces (This is supported by all versions of GitLab-Runner)
+    - Finished jobs which have not archived live traces will lose the last part(~128KB) of trace data.
+  - Case 2: When sidekiq workers failed to archive (e.g. there was a bug that prevents the archiving process, Sidekiq inconsistency, etc)
+    - Currently all trace data in Redis will be deleted after one week. If the sidekiq workers can't finish by the expiry date, that part of the trace data will be lost.
+- This feature could consume all memory on Redis instance. If the number of jobs is 1000, 128KB * 1000 = 128MB is consumed.
+- This feature could pressure Database instance. `INSERT` is queried per 128KB per job. `UPDATE` is queried with the same condition, but only if the total size of the trace exceeds 128KB.
+- and so on
+
+**How to test**
+
+We're currently evaluating this feature on dev.gitlab.org or staging.gitlab.com to verify this feature. Here is the list of tests/measurements.
+
+- Features
+  - Live traces should be visible on job pages
+  - Archived traces should be visible on job pages
+  - Live traces should be archived to Object Storage
+  - Live traces should be cleaned up after being archived
+  - etc
+- Performance
+  - Schedule 1000~10000 jobs and let GitLab Runners process them concurrently. Measure memory pressure, IO load, etc.
+  - etc
+- Failover
+  - Simulate Redis outage
+  - etc
+
+**How to verify the correctness**
+
+ - TBD
+
+[ce-18169]: https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/18169
-- 
GitLab

From 819fccd6c7d72fabc3bfa3807094f4c9a3226292 Mon Sep 17 00:00:00 2001
From: Shinya Maeda
Date: Mon, 7 May 2018 16:54:47 +0900
Subject: [PATCH 82/86] Fix spec: ci_trace_shared_examples.rb for positive
 offset by append

---
 spec/support/shared_examples/ci_trace_shared_examples.rb | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/spec/support/shared_examples/ci_trace_shared_examples.rb b/spec/support/shared_examples/ci_trace_shared_examples.rb
index 5640451dad3c..21c6f3c829fe 100644
--- a/spec/support/shared_examples/ci_trace_shared_examples.rb
+++ b/spec/support/shared_examples/ci_trace_shared_examples.rb
@@ -196,7 +196,7 @@
     context 'tries to append trace at different offset' do
       it "fails with append" do
-        expect(trace.append("56", 2)).to eq(-4)
+        expect(trace.append("56", 2)).to eq(4)
         expect(trace.raw).to eq("1234")
       end
     end
-- 
GitLab
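A hedged, self-contained sketch of the append contract the spec above now expects (a toy class, not the real `Gitlab::Ci::Trace`): on an offset mismatch the method returns the current, non-negative length, which the API layer can echo back in the 416 `Range` header.

```ruby
class ToyTrace
  def initialize(data)
    @data = data.dup
  end

  # Mirrors the changed contract: returns the new size on success, or the
  # current length (no longer negated) when the offset does not line up.
  def append(chunk, offset)
    return @data.bytesize unless @data.bytesize == offset

    @data << chunk
    @data.bytesize
  end

  def raw
    @data
  end
end

trace = ToyTrace.new('1234')
trace.append('56', 2) # => 4 (offset mismatch; trace.raw stays "1234")
trace.append('56', 4) # => 6 (appended; trace.raw is now "123456")
```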
From 2a03142c948f65311fd784d620c2d2082882bcd4 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Kamil=20Trzci=C5=84ski?=
Date: Mon, 7 May 2018 10:34:47 +0200
Subject: [PATCH 83/86] Optimise write lock parameters

---
 app/models/ci/build_trace_chunk.rb | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/app/models/ci/build_trace_chunk.rb b/app/models/ci/build_trace_chunk.rb
index 08a4465821c7..5715477266a3 100644
--- a/app/models/ci/build_trace_chunk.rb
+++ b/app/models/ci/build_trace_chunk.rb
@@ -12,9 +12,9 @@ class BuildTraceChunk < ActiveRecord::Base
     CHUNK_SIZE = 128.kilobytes
     CHUNK_REDIS_TTL = 1.week
-    WRITE_LOCK_RETRY = 100
-    WRITE_LOCK_SLEEP = 1
-    WRITE_LOCK_TTL = 5.minutes
+    WRITE_LOCK_RETRY = 10
+    WRITE_LOCK_SLEEP = 5.milisecond
+    WRITE_LOCK_TTL = 1.minute
 
     enum data_store: {
       redis: 1,
@@ -96,7 +96,7 @@ def set_data(value)
         save! if changed?
       end
 
-      schedule_to_db if fullfilled?
+      schedule_to_db if full?
     end
 
     def schedule_to_db
@@ -105,7 +105,7 @@ def schedule_to_db
       Ci::BuildTraceChunkFlushWorker.perform_async(id)
     end
 
-    def fullfilled?
+    def full?
       size == CHUNK_SIZE
     end
-- 
GitLab
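A hedged sketch of the retry loop those constants tune, in plain Ruby with an in-memory lock table (every name below is illustrative; it is not the model's actual locking API):

```ruby
WRITE_LOCK_RETRY = 10   # attempts before giving up, as in the patch above
WRITE_LOCK_SLEEP = 0.01 # pause between attempts, in seconds
WRITE_LOCK_TTL   = 60   # lock lifetime, in seconds

locks = {} # in-memory stand-in for the shared lock store

# Take the lock unless a live (non-expired) holder already exists.
try_lock = lambda do |key|
  next false if locks[key] && locks[key] > Time.now

  locks[key] = Time.now + WRITE_LOCK_TTL
  true
end

acquired = false
WRITE_LOCK_RETRY.times do
  break acquired = true if try_lock.call('trace_write:build:42')
  sleep WRITE_LOCK_SLEEP
end

raise 'failed to obtain a write lock' unless acquired
```

Shortening the retry count and sleep keeps a contended writer from blocking a request thread for minutes at a time.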
From c5067b1232112804aa9634aef3bafb5ec1cc690e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Kamil=20Trzci=C5=84ski?=
Date: Mon, 7 May 2018 10:40:40 +0200
Subject: [PATCH 84/86] Update `job_traces.md` documentation

---
 doc/administration/job_traces.md | 55 ++++++++++++++++----------------
 1 file changed, 27 insertions(+), 28 deletions(-)

diff --git a/doc/administration/job_traces.md b/doc/administration/job_traces.md
index 3470274e5ea5..f0b2054a7f3f 100644
--- a/doc/administration/job_traces.md
+++ b/doc/administration/job_traces.md
@@ -52,35 +52,34 @@ To change the location where the job logs will be stored, follow the steps below
 
 **What is "live trace"?**
 
-It's job traces exists while job is being processed by Gitlab-Runner. You can see the progress in job pages(GUI).
-In contrast, all traces will be archived after job is finished, that's called "archived trace".
+The job trace that is sent by the runner while the job is running. You can see the live trace in the job page UI.
+The live traces are archived once the job finishes.
 
 **What is new architecture?**
 
-So far, when GitLab-Runner sends a job trace to GitLab-Rails, traces have been saved to File Storage as text files.
-This was a problem on [Cloud Native-compatible GitLab application](https://gitlab.com/gitlab-com/migration/issues/23) that
-GitLab-Rails had to rely on File Storage.
+So far, when GitLab Runner sends a job trace to GitLab-Rails, traces have been saved to file storage as text files.
+This was a problem for the [Cloud Native-compatible GitLab application](https://gitlab.com/gitlab-com/migration/issues/23), where GitLab had to rely on file storage.
 
-This new live trace architecture stores traces to Redis and Database instead of File Storage.
-Redis is used as first-class trace storage, it stores each trace upto 128KB. Once the data is fulfileld, it's flushed to Database. Afterwhile, the data in Redis and Database will be archived to ObjectStorage.
+This new live trace architecture stores chunks of traces in Redis and the database instead of file storage.
+Redis is used as first-class storage, and it stores up to 128kB per chunk. Once a chunk is full, it is flushed to the database. Afterwards, the data in Redis and the database will be archived to Object Storage.
 
 Here is the detailed data flow.
 
-1. GitLab-Runner picks a job from GitLab-Rails
-1. GitLab-Runner sends a piece of trace to GitLab-Rails
+1. GitLab Runner picks a job from GitLab-Rails
+1. GitLab Runner sends a piece of trace to GitLab-Rails
 1. GitLab-Rails appends the data to Redis
-1. If the data in Redis is fulfilled 128KB, the data is flushed to Database.
+1. If the data in Redis reaches 128kB, the data is flushed to the database.
 1. Steps 2.-4. are repeated until the job is finished
 1. Once the job is finished, GitLab-Rails schedules a sidekiq worker to archive the trace
 1. The sidekiq worker archives the trace to Object Storage, and cleans up the trace in Redis and Database
 
-**How to check if it's on or off**
+**How to check if it's on or off?**
 
 ```ruby
 Feature.enabled?('ci_enable_live_trace')
 ```
 
-**How to enable**
+**How to enable?**
 
 ```ruby
 Feature.enable('ci_enable_live_trace')
 ```
@@ -89,7 +88,7 @@ Feature.enable('ci_enable_live_trace')
 >**Note:**
 The transition period will be handled gracefully. Upcoming traces will be generated with the new architecture, and on-going live traces will stay with the legacy architecture (i.e. on-going live traces won't be re-generated forcibly with the new architecture).
 
-**How to disable**
+**How to disable?**
 
 ```ruby
 Feature.disable('ci_enable_live_trace')
 ```
@@ -98,41 +97,41 @@ Feature.disable('ci_enable_live_trace')
 >**Note:**
 The transition period will be handled gracefully. Upcoming traces will be generated with the legacy architecture, and on-going live traces will stay with the new architecture (i.e. on-going live traces won't be re-generated forcibly with the legacy architecture).
 
-**Redis namespace**
+**Redis namespace:**
 
 `Gitlab::Redis::SharedState`
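+
+A hedged example of inspecting that namespace from a Rails console (the key pattern below is illustrative, not a documented contract; `Gitlab::Redis::SharedState.with` yields a raw Redis connection):
+
+```ruby
+Gitlab::Redis::SharedState.with do |redis|
+  # Peek at a handful of live-trace related keys; adjust the pattern
+  # to whatever the running release actually uses.
+  redis.scan_each(match: 'gitlab:ci:trace:*').first(10)
+end
+```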
-**Potential impact**
+**Potential impact:**
 
-- This feature could incur data loss
+- This feature could incur data loss:
   - Case 1: When all data in Redis is accidentally flushed.
-    - On-going live traces could be recovered by re-sending traces (This is supported by all versions of GitLab-Runner)
+    - On-going live traces could be recovered by re-sending traces (This is supported by all versions of GitLab Runner)
-    - Finished jobs which have not archived live traces will lose the last part(~128KB) of trace data.
+    - Finished jobs which have not archived live traces will lose the last part (~128kB) of trace data.
   - Case 2: When sidekiq workers failed to archive (e.g. there was a bug that prevents the archiving process, Sidekiq inconsistency, etc):
     - Currently all trace data in Redis will be deleted after one week. If the sidekiq workers can't finish by the expiry date, that part of the trace data will be lost.
-- This feature could consume all memory on Redis instance. If the number of jobs is 1000, 128KB * 1000 = 128MB is consumed.
+- This feature could consume all memory on the Redis instance. If the number of jobs is 1000, 128MB (128kB * 1000) is consumed.
-- This feature could pressure Database instance. `INSERT` is queried per 128KB per job. `UPDATE` is queried with the same condition, but only if the total size of the trace exceeds 128KB.
+- This feature could increase Database replication lag. `INSERT`s are generated to indicate that we have a trace chunk. An `UPDATE` with 128kB of data is issued once we receive multiple chunks.
 - and so on
 
-**How to test**
+**How to test?**
 
 We're currently evaluating this feature on dev.gitlab.org or staging.gitlab.com to verify this feature. Here is the list of tests/measurements.
 
-- Features
+- Features:
   - Live traces should be visible on job pages
   - Archived traces should be visible on job pages
   - Live traces should be archived to Object Storage
   - Live traces should be cleaned up after being archived
   - etc
-- Performance
+- Performance:
   - Schedule 1000~10000 jobs and let GitLab Runners process them concurrently. Measure memory pressure, IO load, etc.
   - etc
-- Failover
+- Failover:
   - Simulate Redis outage
   - etc
 
-**How to verify the correctness**
-
- - TBD
+**How to verify the correctness?**
+
+- TBD
 
 [ce-18169]: https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/18169
-- 
GitLab

From d6d25d2da67fce3bb1392317f4f08f5423ee687d Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Kamil=20Trzci=C5=84ski?=
Date: Mon, 7 May 2018 11:05:26 +0200
Subject: [PATCH 85/86] Fix WRITE_LOCK_SLEEP

---
 app/models/ci/build_trace_chunk.rb | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/app/models/ci/build_trace_chunk.rb b/app/models/ci/build_trace_chunk.rb
index 5715477266a3..779daa5fa451 100644
--- a/app/models/ci/build_trace_chunk.rb
+++ b/app/models/ci/build_trace_chunk.rb
@@ -13,7 +13,7 @@ class BuildTraceChunk < ActiveRecord::Base
     CHUNK_SIZE = 128.kilobytes
     CHUNK_REDIS_TTL = 1.week
     WRITE_LOCK_RETRY = 10
-    WRITE_LOCK_SLEEP = 5.milisecond
+    WRITE_LOCK_SLEEP = 0.01.second
     WRITE_LOCK_TTL = 1.minute
 
     enum data_store: {
-- 
GitLab

From e1d11cc64970d712352de0c5daadced7f274ea3d Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Kamil=20Trzci=C5=84ski?=
Date: Mon, 7 May 2018 11:45:38 +0200
Subject: [PATCH 86/86] Fix rubocop

---
 app/models/ci/build_trace_chunk.rb | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/app/models/ci/build_trace_chunk.rb b/app/models/ci/build_trace_chunk.rb
index 779daa5fa451..d9e923daa71c 100644
--- a/app/models/ci/build_trace_chunk.rb
+++ b/app/models/ci/build_trace_chunk.rb
@@ -13,7 +13,7 @@ class BuildTraceChunk < ActiveRecord::Base
     CHUNK_SIZE = 128.kilobytes
     CHUNK_REDIS_TTL = 1.week
     WRITE_LOCK_RETRY = 10
-    WRITE_LOCK_SLEEP = 0.01.second
+    WRITE_LOCK_SLEEP = 0.01.seconds
     WRITE_LOCK_TTL = 1.minute
 
     enum data_store: {
-- 
GitLab