package org.openslx.filetransfer.util;

import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.zip.CRC32;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.openslx.filetransfer.FileRange;
import org.openslx.filetransfer.LocalChunkSource.ChunkSource;

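/**
 * A chunk of a file in transfer: its byte range within the file, expected
 * SHA-1 checksum, transfer status, and optionally a DNBD3-style CRC32 of
 * its content.
 */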
public class FileChunk
{
	
	private static final Logger LOGGER = LogManager.getLogger( FileChunk.class );
	
	/**
	 * Length in bytes of the binary SHA-1 digest.
	 */
	public static final int SHA1_LENGTH = 20;
	public static final int CHUNK_SIZE_MIB = 16;
	public static final int CHUNK_SIZE = CHUNK_SIZE_MIB * ( 1024 * 1024 );

	public final FileRange range;
	private int failCount = 0;
	protected byte[] sha1sum;
	protected CRC32 crc32;
	protected ChunkStatus status = ChunkStatus.MISSING;
	private boolean writtenToDisk = false;
	private ChunkSource localSource = null;

	/**
	 * SHA-1 checksum of an all-zero chunk; setSha1Sum() below treats chunks
	 * with this hash as already written to disk, since a sparse file reads
	 * back as zeros anyway.
	 */
	static final byte[] NULL_BLOCK_SHA1 = new byte[] {
			0x3b, 0x44, 0x17, (byte)0xfc, 0x42, 0x1c, (byte)0xee, 0x30, (byte)0xa9, (byte)0xad, 0x0f,
			(byte)0xd9, 0x31, (byte)0x92, 0x20, (byte)0xa8, (byte)0xda, (byte)0xe3, 0x2d, (byte)0xa2
	};
	
	/** CRC32 value returned for all-zero chunks by the stub installed in setSha1Sum(). */
	static final long NULL_BLOCK_CRC32 = 2759631178L;

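	/**
	 * Create a chunk covering the given byte range. A checksum that is null
	 * or not exactly SHA1_LENGTH bytes long is stored as unknown (null).
	 */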
	public FileChunk( long startOffset, long endOffset, byte[] sha1sum )
	{
		this.range = new FileRange( startOffset, endOffset );
		if ( sha1sum == null || sha1sum.length != SHA1_LENGTH ) {
			this.sha1sum = null;
		} else {
			this.sha1sum = sha1sum;
		}
	}

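	/**
	 * Set this chunk's SHA-1 checksum. Rejected if a checksum is already set
	 * or the given one is invalid. An all-zero chunk is immediately considered
	 * written to disk and gets its known CRC32; any other chunk that was
	 * already COMPLETE goes back to HASHING so the data gets re-verified
	 * against the new checksum.
	 *
	 * @return true if the checksum was accepted
	 */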
	synchronized boolean setSha1Sum( byte[] sha1sum )
	{
		if ( this.sha1sum != null || sha1sum == null || sha1sum.length != SHA1_LENGTH )
			return false;
		this.sha1sum = sha1sum;
		if ( Arrays.equals( sha1sum, NULL_BLOCK_SHA1 ) ) {
			// All-zero chunk: consider it written to disk already (a sparse file
			// reads back as zeros), and install a CRC32 stub with the known value
			writtenToDisk = true;
			if ( crc32 == null ) {
				crc32 = new CRC32() {
					@Override
					public long getValue()
					{
						return NULL_BLOCK_CRC32;
					}
				};
			}
			return true;
		}
		if ( this.status == ChunkStatus.COMPLETE ) {
			this.status = ChunkStatus.HASHING;
		}
		return true;
	}

	/**
	 * Signal that transferring this chunk seems to have failed (checksum
	 * mismatch).
	 * 
	 * @return Number of times the transfer failed now
	 */
	synchronized int incFailed()
	{
		return ++failCount;
	}

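	/**
	 * Index of this chunk within the file, derived from its start offset and
	 * the fixed chunk size.
	 */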
	public int getChunkIndex()
	{
		return (int) ( range.startOffset / CHUNK_SIZE );
	}

	@Override
	public String toString()
	{
		return "[Chunk " + getChunkIndex() + " (" + status + "), fails: " + failCount + "]";
	}

	public synchronized byte[] getSha1Sum()
	{
		return sha1sum;
	}

	public synchronized ChunkStatus getStatus()
	{
		return status;
	}

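	/**
	 * Calculate the CRC32 of this chunk's data the way DNBD3 expects it: the
	 * data is virtually padded with zero bytes to a multiple of 4 KiB before
	 * hashing. May be called multiple times; only the last result is kept.
	 */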
	public synchronized void calculateDnbd3Crc32( byte[] data )
	{
		// As this is usually called before we have validated the sha1, handle the
		// case where this gets called multiple times, and only remember the last result
		long old = Long.MAX_VALUE;
		if ( crc32 == null ) {
			crc32 = new CRC32();
		} else {
			LOGGER.info( "Redoing CRC32 of Chunk " + getChunkIndex() );
			old = crc32.getValue();
			crc32.reset();
		}
		int expectedLength = range.getLength();
		if ( expectedLength > data.length ) {
			LOGGER.error( "Chunk #" + getChunkIndex() + ": Buffer holds only " + data.length
					+ " bytes, expected " + expectedLength );
			// Clamp so the update below doesn't throw; the resulting CRC32 will be
			// wrong, but this case indicates a bug in the caller anyway
			expectedLength = data.length;
		}
		crc32.update( data, 0, expectedLength );
		if ( ( expectedLength % 4096 ) != 0 ) {
			// DNBD3 virtually pads all images to be a multiple of 4KiB in size,
			// so simulate that here too
			LOGGER.debug( "Block " + getChunkIndex() + " not multiple of 4k." );
			byte[] padding = new byte[ 4096 - ( expectedLength % 4096 ) ];
			crc32.update( padding );
		}
		if ( old != Long.MAX_VALUE && old != crc32.getValue() ) {
			LOGGER.warn( String.format( "Changed from %x to %x", old, crc32.getValue() ) );
		}
	}

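	/**
	 * Write this chunk's CRC32 into the given buffer at the given offset, in
	 * little-endian byte order.
	 *
	 * @throws IllegalStateException if no CRC32 has been calculated yet
	 */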
	public synchronized void getCrc32Le( byte[] buffer, int offset )
	{
		if ( crc32 == null )
			throw new IllegalStateException( "Trying to get CRC32 on Chunk that doesn't have one" );
		int value = (int)crc32.getValue();
		buffer[offset + 3] = (byte) ( value >>> 24 );
		buffer[offset + 2] = (byte) ( value >>> 16 );
		buffer[offset + 1] = (byte) ( value >>> 8 );
		buffer[offset + 0] = (byte)value;
	}

	/**
	 * Whether the data this chunk refers to has been written to disk and is
	 * assumed to be valid/up to date.
	 */
	public synchronized boolean isWrittenToDisk()
	{
		return writtenToDisk;
	}

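	/**
	 * Update this chunk's status, keeping writtenToDisk consistent: COMPLETE
	 * means the data is on disk, MISSING and QUEUED_FOR_COPY mean it is not.
	 * A null status is ignored.
	 */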
	synchronized void setStatus( ChunkStatus status )
	{
		if ( status != null ) {
			if ( status == ChunkStatus.COMPLETE ) {
				this.writtenToDisk = true;
			} else if ( status == ChunkStatus.MISSING || status == ChunkStatus.QUEUED_FOR_COPY ) {
				this.writtenToDisk = false;
			}
			this.status = status;
		}
	}

	// Static helpers

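	/**
	 * Number of chunks a file of the given size is split into: the size
	 * divided by CHUNK_SIZE, rounded up. E.g. a 33 MiB file with 16 MiB
	 * chunks yields 3 chunks.
	 */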
	public static int fileSizeToChunkCount( long fileSize )
	{
		return (int) ( ( fileSize + CHUNK_SIZE - 1 ) / CHUNK_SIZE );
	}

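	/**
	 * Fill the given (empty) list with chunks covering a file of the given
	 * size, attaching a SHA-1 checksum to each chunk as long as the given
	 * list of checksums has entries left. A minimal usage sketch (variable
	 * names are illustrative):
	 *
	 * <pre>
	 * List&lt;FileChunk&gt; chunks = new ArrayList&lt;&gt;();
	 * FileChunk.createChunkList( chunks, fileSize, sha1Sums ); // sha1Sums may be null
	 * </pre>
	 */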
	public static void createChunkList( List<FileChunk> list, long fileSize, List<byte[]> sha1Sums )
	{
		if ( fileSize < 0 )
			throw new IllegalArgumentException( "fileSize cannot be negative" );
		if ( !list.isEmpty() )
			throw new IllegalArgumentException( "Passed list is not empty" );
		long offset = 0;
		Iterator<byte[]> hashIt = null;
		if ( sha1Sums != null ) {
			hashIt = sha1Sums.iterator();
		}
		while ( offset < fileSize ) {
			long end = offset + CHUNK_SIZE;
			if ( end > fileSize )
				end = fileSize;
			byte[] hash = null;
			if ( hashIt != null && hashIt.hasNext() ) {
				hash = hashIt.next();
			}
			list.add( new FileChunk( offset, end, hash ) );
			offset = end;
		}
	}

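	/**
	 * Number of times the transfer of this chunk failed so far.
	 */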
	public synchronized int getFailCount()
	{
		return failCount;
	}

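	/**
	 * Set a local source this chunk's data can be copied from, instead of
	 * transferring it over the network.
	 */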
	public synchronized void setSource( ChunkSource src )
	{
		this.localSource = src;
	}

	/**
	 * The local source set via setSource(), if any. (Returns a single source,
	 * despite the plural name.)
	 */
	public synchronized ChunkSource getSources()
	{
		return this.localSource;
	}

}