summaryrefslogtreecommitdiffstats
path: root/dozentenmodul/src/main/java/org/openslx/dozmod/filetransfer/AsyncHashGenerator.java
blob: 83e893027c1f28d03a757535ed9226af7a1f6002 (plain) (blame)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
package org.openslx.dozmod.filetransfer;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.ByteBuffer;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.openslx.bwlp.thrift.iface.TInvalidTokenException;
import org.openslx.dozmod.thrift.Session;
import org.openslx.filetransfer.util.FileChunk;
import org.openslx.thrifthelper.ThriftManager;
import org.openslx.util.GrowingThreadPoolExecutor;
import org.openslx.util.PrioThreadFactory;
import org.openslx.util.Util;

/**
 * Computes SHA-1 hashes of a file's chunks in the background while the file
 * is being uploaded, and periodically submits the (partial) hash list to the
 * satellite server via thrift.
 * <p>
 * Threading model: this {@code Thread} is the reader; it reads chunks
 * sequentially from the file and hands them to a shared, bounded worker pool
 * for hashing. Workers call back into {@link #hashDone(FileChunk, byte[])},
 * which tracks completion under the {@code chunkHashes} monitor.
 */
public class AsyncHashGenerator extends Thread {

	private static final Logger LOGGER = LogManager.getLogger(AsyncHashGenerator.class);

	// Shared CPU-bound hashing pool with a tiny queue; CallerRunsPolicy means
	// the reader thread hashes inline when workers are saturated, which
	// naturally throttles how fast chunks are read from disk.
	private static final ThreadPoolExecutor HASH_WORK_POOL = new GrowingThreadPoolExecutor(1,
			Math.max(1, Runtime.getRuntime().availableProcessors() - 1),
			10, TimeUnit.SECONDS, new LinkedBlockingQueue<Runnable>(2),
			new PrioThreadFactory("HashGen"), new ThreadPoolExecutor.CallerRunsPolicy());

	// MessageDigest instances are not thread safe, so keep one per worker
	// thread. initialValue() may yield null if the JVM lacks SHA-1 (unlikely;
	// every compliant JRE must provide it) - workers must null-check.
	private static final ThreadLocal<MessageDigest> SHA1_DIGESTER = new ThreadLocal<MessageDigest>() {
		@Override
		protected MessageDigest initialValue() {
			try {
				return MessageDigest.getInstance("SHA-1");
			} catch (NoSuchAlgorithmException e) {
				LOGGER.warn("No SHA-1 MD available. Cannot hash file", e);
				return null;
			}
		}
	};

	// Written once by setUploadToken() from another thread, read by the
	// reader thread (run()) and by pool workers (submitHashes()). Declared
	// volatile so the write is guaranteed to become visible to those threads;
	// previously this was a plain field, so workers could keep seeing null.
	private volatile String uploadToken = null;
	// Number of leading chunks whose hashes are all present; guarded by the
	// chunkHashes monitor.
	private int finishedChunks = 0;
	private final RandomAccessFile file;
	// Sparse list of per-chunk hashes, indexed by chunk index; guarded by its
	// own monitor.
	private final List<ByteBuffer> chunkHashes;
	private final List<FileChunk> chunkList;
	// Rate limiters for debug logging only; racy access is acceptable here.
	private long nextReadMsg, nextDoneMsg, nextSendingMsg; // for debug spam :-(

	private volatile boolean isCanceled = false;

	/**
	 * Creates a hash generator for the given file. Call {@link #start()} to
	 * begin hashing, and {@link #setUploadToken(String)} once the upload has
	 * been registered with the server.
	 *
	 * @param uploadFile file to hash chunk-wise
	 * @throws FileNotFoundException if the file cannot be opened for reading
	 * @throws NoSuchAlgorithmException declared for API compatibility; not
	 *             actually thrown here (SHA-1 lookup happens lazily per worker)
	 */
	public AsyncHashGenerator(File uploadFile) throws FileNotFoundException, NoSuchAlgorithmException {
		try {
			file = new RandomAccessFile(uploadFile, "r");
		} catch (FileNotFoundException e) {
			LOGGER.warn("Could not open file for hash-checking. Will not send checksums to satellite", e);
			throw e;
		}
		LOGGER.debug("Opened file for hashing");
		chunkList = new ArrayList<>();
		FileChunk.createChunkList(chunkList, uploadFile.length(), null);
		chunkHashes = new ArrayList<>(chunkList.size());
		setDaemon(true);
		setName("HashGenerator");
	}

	/**
	 * Sets the upload token once known, enabling hash submission to the
	 * server. Only the first call has an effect; subsequent calls and calls
	 * after cancellation are ignored.
	 *
	 * @param token server-issued upload token
	 */
	public void setUploadToken(String token) {
		// Not a synchronized check-then-act: this is only expected to be
		// called once, and uploadToken is volatile for visibility.
		if (!isCanceled && this.uploadToken == null) {
			this.uploadToken = token;
			submitHashes(false);
		}
	}

	/**
	 * Reader loop: sequentially reads each chunk from the file and submits it
	 * to the hash pool. Exits when all chunks are queued, on cancellation, or
	 * if the shared pool has shut down.
	 */
	@Override
	public void run() {
		LOGGER.debug("Started hash reader worker");
		try {
			for (FileChunk chunk : chunkList) {
				if (isCanceled) {
					LOGGER.debug("Cancelled chunk reader (1)");
					break;
				}
				Block block;
				try {
					byte[] buffer;
					try {
						buffer = new byte[chunk.range.getLength()];
					} catch (OutOfMemoryError e) {
						// Back off and retry the same chunk later instead of dying
						LOGGER.info("Low memory - slowing down hashing");
						Util.sleep(5000);
						continue;
					}
					file.seek(chunk.range.startOffset);
					file.readFully(buffer);
					block = new Block(chunk, buffer);
				} catch (IOException e) {
					// Submit an empty placeholder so bookkeeping stays consistent;
					// the worker hashes only what it actually received.
					LOGGER.warn("Could not read file chunk " + chunk.getChunkIndex() + ", skipping", e);
					block = new Block(chunk, new byte[0]);
				}
				if (isCanceled) {
					LOGGER.debug("Cancelled chunk reader (2)");
					break;
				}
				//
				if (System.currentTimeMillis() > nextReadMsg) {
					nextReadMsg = System.currentTimeMillis() + 30000;
					LOGGER.debug("Read chunk " + chunk.getChunkIndex());
				}
				//
				for (;;) {
					if (HASH_WORK_POOL.isTerminating() || HASH_WORK_POOL.isTerminated() || HASH_WORK_POOL.isShutdown()) {
						LOGGER.warn("Aborting current hash job - pool has shut down");
						Thread.currentThread().interrupt();
						return;
					}
					try {
						HASH_WORK_POOL.execute(block);
						// Don't hash too furiously in the background if the upload didn't start yet
						if (uploadToken == null && chunk.getChunkIndex() > 4) {
							Util.sleep(200);
						}
					} catch (RejectedExecutionException e) {
						LOGGER.warn("Hash pool worker rejected a hash job!? Retrying...");
						Util.sleep(1000);
						continue;
					}
					break;
				}
			}
		} finally {
			Util.safeClose(file);
		}
	}

	/**
	 * Requests cancellation; the reader loop and hash submission stop at the
	 * next check of the flag.
	 */
	public void cancel() {
		LOGGER.debug("Cancelled externally");
		isCanceled = true;
	}

	/**
	 * Worker for hashing chunk. Processed via thread pool.
	 */
	private class Block implements Runnable {
		public final FileChunk chunk;
		public final byte[] buffer;

		public Block(FileChunk chunk, byte[] buffer) {
			this.chunk = chunk;
			this.buffer = buffer;
		}

		@Override
		public void run() {
			MessageDigest digester = SHA1_DIGESTER.get();
			if (digester == null)
				return; // No SHA-1 on this JVM; initialValue() already logged it
			// Bound by buffer.length: on a read error the reader submits an
			// empty buffer, and using chunk.range.getLength() here would throw
			// ArrayIndexOutOfBoundsException, silently killing the job in the
			// pool and stalling completion tracking forever.
			digester.update(buffer, 0, Math.min(buffer.length, chunk.range.getLength()));
			byte[] hash = digester.digest(); // digest() also resets for reuse
			hashDone(chunk, hash);
		}
	}

	/**
	 * Called by worker thread when a chunk has been hashed.
	 * This means this method is not running in the currentAsyncHashGenerator
	 * thread but one of the workers.
	 * 
	 * @param chunk the chunk that was hashed
	 * @param hash its SHA-1 digest
	 */
	private void hashDone(FileChunk chunk, byte[] hash) {
		int chunkIndex = chunk.getChunkIndex();
		boolean wasLastChunk = false;
		if (System.currentTimeMillis() > nextDoneMsg) {
			nextDoneMsg = System.currentTimeMillis() + 30000;
			LOGGER.debug("Done hashing chunk " + chunkIndex);
		}
		synchronized (chunkHashes) {
			// Chunks finish out of order: pad with nulls up to this index
			while (chunkHashes.size() < chunkIndex) {
				chunkHashes.add(null);
			}
			if (chunkHashes.size() == chunkIndex) {
				chunkHashes.add(ByteBuffer.wrap(hash));
			} else {
				chunkHashes.set(chunkIndex, ByteBuffer.wrap(hash));
			}
			// Advance the contiguous-prefix counter if this filled the gap
			if (chunkIndex == finishedChunks) {
				while (finishedChunks < chunkHashes.size() && chunkHashes.get(finishedChunks) != null) {
					finishedChunks++;
					if (finishedChunks == chunkList.size()) {
						wasLastChunk = true;
					}
				}
			}
			if (chunkIndex + 1 == chunkList.size()) {
				LOGGER.debug("Hashed last chunk #" + chunkIndex + ", total=" + chunkList.size() + ", finished=" + finishedChunks);
			}
		}
		if (wasLastChunk) {
			// Last chunk was hashed - make sure list gets to the server
			// Try up to 10 times
			LOGGER.debug("Hashing done");
			for (int i = 0; i < 10; ++i) {
				if (submitHashes(true)) {
					LOGGER.debug("Hashes sent to server");
					break;
				}
				LOGGER.debug("Sending hashes failed...");
				if (!Util.sleep(2000))
					break; // Interrupted
			}
		} else if (chunkIndex % 20 == 0) {
			// Mid-hashing - update server side
			if (!submitHashes(false)) {
				LOGGER.warn("Server rejected partial block hash list");
				isCanceled = true;
			}
		}
	}

	/**
	 * Submit current list of hashes.
	 * 
	 * @param mustSucceed if true, generic transport errors are reported as
	 *            failure (so the caller can retry); if false they are ignored
	 * @return false if the token is not known to the server
	 */
	private boolean submitHashes(boolean mustSucceed) {
		List<ByteBuffer> subList;
		boolean d;
		synchronized (chunkHashes) {
			// Snapshot the contiguous prefix of finished hashes
			subList = new ArrayList<>( chunkHashes.subList(0, finishedChunks) );
			d = (finishedChunks == chunkList.size());
		}
		// Always log when complete, otherwise rate-limit to every 30s
		if (!d) {
			d = System.currentTimeMillis() > nextSendingMsg;
		}
		if (d) {
			nextSendingMsg = System.currentTimeMillis() + 30000;
			LOGGER.debug("Preparing to send hash list to server (" + subList.size() + " / " + (uploadToken != null) + ")");
		}
		if (uploadToken == null || subList.isEmpty()) // No token yet, cannot submit, or empty list
			return true;
		try {
			if (d) LOGGER.debug("Making updateBlockHashes call");
			ThriftManager.getSatClient().updateBlockHashes(uploadToken, subList, Session.getSatelliteToken());
			if (d) LOGGER.debug("updateBlockHashes call succeeded");
		} catch (TInvalidTokenException e) {
			LOGGER.warn("Cannot send hashList to satellite: Sat claims uploadToken is invalid!");
			isCanceled = true;
			return false;
		} catch (Exception e) {
			LOGGER.warn("Unknown exception when submitting hashList to sat", e);
			if (mustSucceed)
				return false;
		}
		return true;
	}

}