@@ -343,11 +343,80 @@ static void added_object(unsigned nr, enum object_type type,
}
}
+/*
+ * State shared between write_stream_blob() and its input_stream read
+ * callback: the zlib stream being inflated, a fixed staging buffer the
+ * callback inflates into, and the most recent git_inflate() status.
+ */
+struct input_zstream_data {
+ git_zstream *zstream;
+ unsigned char buf[8192];
+ int status;
+};
+
+/*
+ * input_stream read callback for stream_loose_object(): inflate the
+ * next chunk of deflated pack data into data->buf and return it, with
+ * *readlen set to the number of inflated bytes produced.
+ *
+ * Raw pack bytes come from the file-scope fill()/use() helpers (and
+ * the file-scope `len` they maintain — assumed to be the count of
+ * bytes fill() made available; confirm against the rest of the file).
+ * Once a previous call saw git_inflate() return anything other than
+ * Z_OK, the stream is marked finished and NULL/0 is returned.
+ */
+static const void *feed_input_zstream(struct input_stream *in_stream,
+ unsigned long *readlen)
+{
+ struct input_zstream_data *data = in_stream->data;
+ git_zstream *zstream = data->zstream;
+ void *in = fill(1);
+
+ if (in_stream->is_finished) {
+ *readlen = 0;
+ return NULL;
+ }
+
+ zstream->next_out = data->buf;
+ zstream->avail_out = sizeof(data->buf);
+ zstream->next_in = in;
+ zstream->avail_in = len;
+
+ data->status = git_inflate(zstream, 0);
+
+ /* Any status other than Z_OK (including Z_STREAM_END) ends input. */
+ in_stream->is_finished = data->status != Z_OK;
+ /* Tell the pack reader how many raw bytes inflate consumed. */
+ use(len - zstream->avail_in);
+ *readlen = sizeof(data->buf) - zstream->avail_out;
+
+ return data->buf;
+}
+
+/*
+ * Unpack object `nr` — a blob whose inflated size is `size` — by
+ * streaming it straight into a loose object file via
+ * stream_loose_object(), so the whole blob never has to be held in
+ * memory at once.  Dies on write failure or if the deflated input did
+ * not terminate cleanly.
+ */
+static void write_stream_blob(unsigned nr, size_t size)
+{
+ git_zstream zstream = { 0 };
+ struct input_zstream_data data = { 0 };
+ struct input_stream in_stream = {
+ .read = feed_input_zstream,
+ .data = &data,
+ };
+
+ data.zstream = &zstream;
+ git_inflate_init(&zstream);
+
+ if (stream_loose_object(&in_stream, size, &obj_list[nr].oid))
+ die(_("failed to write object in stream"));
+
+ /* The zlib stream must have ended exactly at the entry boundary. */
+ if (data.status != Z_STREAM_END)
+ die(_("inflate returned (%d)"), data.status);
+ git_inflate_end(&zstream);
+
+ /* In --strict mode, flag the blob as written, as write_object() does. */
+ if (strict) {
+ struct blob *blob =
+ lookup_blob(the_repository, &obj_list[nr].oid);
+ if (blob)
+ blob->object.flags |= FLAG_WRITTEN;
+ else
+ die(_("invalid blob object from stream"));
+ }
+ /* Already on disk; nothing buffered for a later write-out pass. */
+ obj_list[nr].obj = NULL;
+}
+
+/*
+ * Non-delta entries: blobs over core.bigFileThreshold are streamed to
+ * a loose object by write_stream_blob(); everything else is inflated
+ * into memory and written out as before.
+ */
static void unpack_non_delta_entry(enum object_type type, unsigned long size,
unsigned nr)
{
- void *buf = get_data(size);
+ void *buf;
+
+ /* Write large blob in stream without allocating full buffer. */
+ if (!dry_run && type == OBJ_BLOB && size > big_file_threshold) {
+ write_stream_blob(nr, size);
+ return;
+ }
+ buf = get_data(size);
if (buf)
write_object(nr, type, buf, size);
}
@@ -9,7 +9,11 @@ test_description='git unpack-objects with large objects'
+# Create an empty bare dest.git for a test; if $1 is given, set it as
+# core.bigFileThreshold (e.g. "1m") so blobs above that size take the
+# streaming unpack path.
prepare_dest () {
test_when_finished "rm -rf dest.git" &&
- git init --bare dest.git
+ git init --bare dest.git &&
+ if test -n "$1"
+ then
+ git -C dest.git config core.bigFileThreshold $1
+ fi
assert_no_loose () {
@@ -37,16 +41,29 @@ test_expect_success 'set memory limitation to 1MB' '
'
test_expect_success 'unpack-objects failed under memory limitation' '
- prepare_dest &&
+ prepare_dest 2m &&
test_must_fail git -C dest.git unpack-objects <test-$PACK.pack 2>err &&
grep "fatal: attempting to allocate" err
'
test_expect_success 'unpack-objects works with memory limitation in dry-run mode' '
- prepare_dest &&
+ prepare_dest 2m &&
git -C dest.git unpack-objects -n <test-$PACK.pack &&
assert_no_loose &&
assert_no_pack
'
+# With the threshold below the test blob size, the blob must be
+# streamed to a loose object and the pack must not be kept.
+test_expect_success 'unpack big object in stream' '
+ prepare_dest 1m &&
+ git -C dest.git unpack-objects <test-$PACK.pack &&
+ assert_no_pack
+'
+
+# A large object already present in the repository (added here via
+# index-pack) must not be written out again as a loose object.
+test_expect_success 'do not unpack existing large objects' '
+ prepare_dest 1m &&
+ git -C dest.git index-pack --stdin <test-$PACK.pack &&
+ git -C dest.git unpack-objects <test-$PACK.pack &&
+ assert_no_loose
+'
+
test_done