// NOTE(review): removed non-Java residue (GitHub page chrome, fork notice, and a
// column of bare line numbers from a web scrape) that preceded the package
// declaration and would have prevented this file from compiling.
package org.icatproject.ids;
import java.io.BufferedOutputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import java.util.zip.ZipEntry;
import java.util.zip.ZipException;
import java.util.zip.ZipOutputStream;

import jakarta.json.Json;
import jakarta.json.stream.JsonGenerator;
import jakarta.ws.rs.core.StreamingOutput;

import org.icatproject.ids.plugin.DsInfo;
import org.icatproject.ids.plugin.MainStorageInterface;
import org.icatproject.ids.plugin.ZipMapperInterface;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Class to handle the streaming of either a single file, or multiple files in
* a zip file, from the main storage area into an output stream for download.
*
* This is done on the fly, without having to write the zip file to disk first.
*
* The zip file will be compressed if the corresponding flag is set.
*/
public class IdsStreamingOutput implements StreamingOutput {

    private static final Logger logger = LoggerFactory.getLogger(IdsStreamingOutput.class);

    /** Size in bytes of the copy buffer used when streaming file contents. */
    private static final int BUFSIZ = 2048;

    private final PropertyHandler propertyHandler;
    private final MainStorageInterface mainStorage;
    private final ZipMapperInterface zipMapper;
    private final long offset;
    private final boolean zip;
    private final Map<Long, DsInfo> dsInfos;
    private final boolean compress;
    private final Set<DfInfoImpl> dfInfos;
    private final Long transferId;

    /**
     * Create a StreamingOutput for one or more datafiles.
     *
     * @param dsInfos    dataset infos keyed by dataset id, used to build zip entry names
     * @param dfInfos    the datafiles to stream; when {@code zip} is false only the
     *                   first element (iterator order) is streamed
     * @param offset     number of leading bytes to skip in the generated stream
     * @param zip        if true, wrap the files in a (possibly compressed) zip stream
     * @param compress   if true and {@code zip} is set, use default zip compression;
     *                   otherwise entries are stored uncompressed
     * @param transferId identifier for this transfer, may be null
     */
    public IdsStreamingOutput(Map<Long, DsInfo> dsInfos, Set<DfInfoImpl> dfInfos,
            long offset, boolean zip, boolean compress, Long transferId) {
        propertyHandler = PropertyHandler.getInstance();
        mainStorage = propertyHandler.getMainStorage();
        zipMapper = propertyHandler.getZipMapper();
        this.offset = offset;
        this.zip = zip;
        this.dsInfos = dsInfos;
        this.dfInfos = dfInfos;
        this.compress = compress;
        this.transferId = transferId;
    }

    /**
     * Stream the configured file(s) into {@code output}, either as a zip archive
     * or as a single raw file, honouring the configured byte offset.
     *
     * @throws IOException if streaming fails; per-file read failures inside a zip
     *                     are not fatal and are instead listed in a extra zip entry
     */
    @Override
    public void write(OutputStream output) throws IOException {
        // Tracks what was being transferred when an error occurs, for logging only.
        Object transfer = "??";
        try {
            if (offset != 0) {
                // Wrap the stream so the first 'offset' bytes written are discarded.
                output = new RangeOutputStream(output, offset, null);
            }
            byte[] bytes = new byte[BUFSIZ];
            if (zip) {
                ZipOutputStream zos = new ZipOutputStream(new BufferedOutputStream(output));
                if (!compress) {
                    zos.setLevel(0); // store only; otherwise use default compression
                }
                // Ordered set of entry names that could not be read from main storage.
                Set<String> missingFiles = new TreeSet<>();
                for (DfInfoImpl dfInfo : dfInfos) {
                    logger.debug("Adding {} to zip", dfInfo);
                    transfer = dfInfo;
                    DsInfo dsInfo = dsInfos.get(dfInfo.getDsId());
                    String entryName = zipMapper.getFullEntryName(dsInfo, dfInfo);
                    // try-with-resources guarantees the storage stream is closed even
                    // when an exception escapes mid-copy (fix: the original leaked the
                    // stream if an exception was thrown before the explicit close()).
                    try (InputStream stream = mainStorage.get(dfInfo.getDfLocation(),
                            dfInfo.getCreateId(), dfInfo.getModId())) {
                        zos.putNextEntry(new ZipEntry(entryName));
                        int length;
                        while ((length = stream.read(bytes)) >= 0) {
                            zos.write(bytes, 0, length);
                        }
                        zos.closeEntry();
                    } catch (ZipException e) {
                        // Duplicate entry name: keep the first occurrence, skip this one.
                        logger.debug("Skipped duplicate");
                    } catch (IOException e) {
                        // Best effort: note the missing file and keep building the zip.
                        logger.warn("Caught IOException {} {}", e.getClass().getSimpleName(),
                                e.getMessage());
                        logger.warn("Skipping missing file in zip: {}", entryName);
                        missingFiles.add(entryName);
                    }
                }
                if (!missingFiles.isEmpty()) {
                    // Add an extra entry listing the files that could not be included.
                    String missingFilesZipEntryName = propertyHandler.getMissingFilesZipEntryName();
                    logger.debug("Adding missing files listing {} to zip", missingFilesZipEntryName);
                    StringBuilder sb = new StringBuilder();
                    sb.append("The following files were not found:").append("\n");
                    for (String filename : missingFiles) {
                        sb.append(filename).append("\n");
                    }
                    // Fix: encode with an explicit charset instead of the platform default.
                    byte[] data = sb.toString().getBytes(StandardCharsets.UTF_8);
                    zos.putNextEntry(new ZipEntry(missingFilesZipEntryName));
                    zos.write(data, 0, data.length);
                    zos.closeEntry();
                }
                zos.close(); // also flushes and closes the wrapped output stream
            } else {
                // Single-file download: stream the first (only expected) datafile.
                DfInfoImpl dfInfo = dfInfos.iterator().next();
                transfer = dfInfo;
                try (InputStream stream = mainStorage.get(dfInfo.getDfLocation(),
                        dfInfo.getCreateId(), dfInfo.getModId())) {
                    int length;
                    while ((length = stream.read(bytes)) >= 0) {
                        output.write(bytes, 0, length);
                    }
                    output.close();
                }
            }
            if (transferId != null) {
                // NOTE(review): this JSON is built but never consumed here — presumably
                // it was meant to be sent to a transfer-notification service; confirm
                // against the rest of the fork before removing.
                ByteArrayOutputStream baos = new ByteArrayOutputStream();
                try (JsonGenerator gen = Json.createGenerator(baos).writeStartObject()) {
                    gen.write("transferId", transferId);
                    gen.writeEnd();
                }
            }
        } catch (IOException e) {
            // NOTE(review): as above, this JSON document is never consumed here.
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            try (JsonGenerator gen = Json.createGenerator(baos).writeStartObject()) {
                // Fix: transferId may be null (the success path guards for this); the
                // original unboxed it unconditionally, so an NPE could mask 'e'.
                if (transferId != null) {
                    gen.write("transferId", transferId);
                }
                gen.write("exceptionClass", e.getClass().getName());
                // Fix: getMessage() may be null, which JsonGenerator rejects.
                String message = e.getMessage();
                gen.write("exceptionMessage", message == null ? "" : message);
                gen.writeEnd();
            }
            logger.error("Failed to stream {} due to {}", transfer, e.getMessage());
            throw e;
        }
    }
}