@@ -10,7 +10,8 @@ namespace Pprf {
 
 	public enum UploadStatus {
 		INITIATING_SESSION,
-		SENDING_CHUNKS,
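+		// SENDING_CHUNKS is split so callers can tell in-flight byte progress
+		// (UPLOADING_CHUNK) apart from chunk completion (UPLOADED_CHUNK).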
+		UPLOADING_CHUNK,
+		UPLOADED_CHUNK,
 		UNPUBLISHING,
 		FINALISING_SESSION,
 		COMPLETE
@@ -85,11 +86,17 @@ namespace Pprf {
 			return (Messages.UploadSession)response;
 		}
 
-		public void send_upload_chunk(BinaryData collection_id, Messages.UploadSession session, uint64 offset, Bytes chunk, MemberIdentity identity) throws Error {
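+		// The optional callback receives incremental byte counts from the
+		// tracked message body, enabling fine-grained upload progress.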
+		public void send_upload_chunk(BinaryData collection_id, Messages.UploadSession session, uint64 offset, Bytes chunk, MemberIdentity identity, Messages.TrackedBytesMessageBodyCallback? callback = null) throws Error {
 			var message = new Messages.Upload();
 			message.collection_id = collection_id;
 			message.offset = offset;
-			message.upload_chunk = new Messages.BytesMessageBody(chunk);
+
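+			// Without a callback, keep the plain body; otherwise wrap the chunk
+			// in a tracked body that reports bytes as they are written out.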
+			if(callback == null) {
+				message.upload_chunk = new Messages.BytesMessageBody(chunk);
+			}
+			else {
+				message.upload_chunk = new Messages.TrackedBytesMessageBody(chunk, callback);
+			}
 			var checksum = Util.data_checksum(chunk.get_data());
 			message.authenticate(session.session_authentication, checksum, identity.credentials);
 
@@ -108,7 +115,7 @@ namespace Pprf {
 			assert_expected_type(response, typeof(Messages.Confirmation));
 		}
 
-		const uint32 MAX_CHUNK_SIZE = 524288;
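+		// Raised from 512 KiB to 1 GiB (2^30); the effective chunk size is
+		// still clamped to session.max_chunk_size in upload() below.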
+		const uint32 MAX_CHUNK_SIZE = 1073741824;
 		public void upload(BinaryData collection_id, InputStream data, uint64 size, string destination, bool unpublish_before_finalise, MemberIdentity identity, UploadProgressDelegate? progress_cb = null, uint8 flags = 0) throws Error {
 			UploadProgressDelegate cb = () => {};
 			if(progress_cb != null) {
@@ -121,16 +128,17 @@ namespace Pprf {
 			var checksum = new Checksum(ChecksumType.SHA512);
 			var chunk_size = uint32.min(MAX_CHUNK_SIZE, session.max_chunk_size);
 			uint64 offset = 0;
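+			// Running total of bytes confirmed by the tracked-body callback;
+			// it runs ahead of offset, which only advances per completed chunk.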
+			uint64 written = 0;
 
-			cb(0, size, UploadStatus.SENDING_CHUNKS);
+			cb(0, size, UploadStatus.UPLOADING_CHUNK);
 			while(offset < size) {
 				var to_read = uint32.min(chunk_size, (uint32)(size - offset));
 				var chunk = data.read_bytes(to_read);
 				checksum.update(chunk.get_data(), chunk.length);
 
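+				// Per-write progress flows through the lambda as UPLOADING_CHUNK;
+				// the cb() after offset advances then signals UPLOADED_CHUNK.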
-				send_upload_chunk(collection_id, session, offset, chunk, identity);
+				send_upload_chunk(collection_id, session, offset, chunk, identity, (w) => cb(written += w, size, UploadStatus.UPLOADING_CHUNK));
 				offset += chunk.length;
-				cb(offset, size, UploadStatus.SENDING_CHUNKS);
+				cb(offset, size, UploadStatus.UPLOADED_CHUNK);
 			}
 
 			size_t dig_len = 64;
|