I am using Chronicle Queue 5.16.8 and I am getting this warning:
net.openhft.chronicle.threads.Pauser : Using Pauser.sleepy() as not enough processors, have 1, needs 8+
Is it possible to increase this processor count?
Source code:
My approach is to use a Chronicle Map to store the reader index. I suppose I could get the same behaviour with recordHistory... (there is a named-tailer sketch after the code below).
I had to fall back on Jackson JSON conversion... I don't know how to use the writeDocument method (a rough writeDocument sketch follows the code).
The while (true) loop is another thing I dislike here... I don't know how to find the last index in the queue (see the last sketch at the end).
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
import net.openhft.chronicle.map.ChronicleMap;
import net.openhft.chronicle.map.ChronicleMapBuilder;
import net.openhft.chronicle.queue.ChronicleQueue;
import net.openhft.chronicle.queue.ExcerptAppender;
import net.openhft.chronicle.queue.ExcerptTailer;
import net.openhft.chronicle.queue.RollCycles;
import net.openhft.chronicle.queue.impl.single.SingleChronicleQueueBuilder;
import org.springframework.stereotype.Service;

import javax.annotation.PostConstruct;
import java.io.File;
import java.util.LinkedList;
import java.util.List;

@Service
public class QueueService {

    public static final String INDEX = "index";

    private final ObjectMapper mapper = new ObjectMapper(); // JSON converter

    @PostConstruct
    public void init() {
        mapper.registerModule(new JavaTimeModule());
        mapper.configure(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, false);
    }

    public void write(List<?> dtos, String path) throws Exception {
        try (ChronicleQueue queue = SingleChronicleQueueBuilder.binary(path).rollCycle(RollCycles.DAILY).build()) {
            final ExcerptAppender appender = queue.acquireAppender();
            for (Object dto : dtos) {
                appender.writeText(mapper.writeValueAsString(dto));
            }
        }
    }

    public void write(Object dto, String path) throws Exception {
        try (ChronicleQueue queue = SingleChronicleQueueBuilder.binary(path).rollCycle(RollCycles.DAILY).build()) {
            final ExcerptAppender appender = queue.acquireAppender();
            appender.writeText(mapper.writeValueAsString(dto));
        }
    }

    public <T> List<T> readList(String path, Class<T> aClass) throws Exception {
        List<T> dtoList = new LinkedList<>();
        try (ChronicleQueue queue = SingleChronicleQueueBuilder.binary(path).build();
             ChronicleMap<String, Long> indexMap = getReaderIndexMap(queue.fileAbsolutePath())) {
            final ExcerptTailer tailer = queue.createTailer();
            // resume from the position persisted by the previous read
            if (indexMap.containsKey(INDEX)) {
                tailer.moveToIndex(indexMap.get(INDEX));
            }
            while (true) { // something smarter?
                String json = tailer.readText();
                if (json == null) {
                    break;
                }
                dtoList.add(mapper.readValue(json, aClass));
            }
            // persist the position reached, for the next call
            indexMap.put(INDEX, tailer.index());
        }
        return dtoList;
    }

    public ChronicleMap<String, Long> getReaderIndexMap(String queueName) throws Exception {
        ChronicleMapBuilder<String, Long> indexReaderMap = ChronicleMapBuilder.of(String.class, Long.class)
                .name("index-reader-map")
                .averageKey(INDEX)
                .entries(1);
        return indexReaderMap.createPersistedTo(new File(queueName + "/reader.idx"));
    }
}
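
On the reader-index question: if upgrading is an option, a named tailer might remove the need for the Chronicle Map entirely, because it persists its own read position inside the queue directory. This is only an untested sketch, and it assumes ChronicleQueue.createTailer(String) is available, which I am not sure is the case in 5.16.8:

import net.openhft.chronicle.queue.ChronicleQueue;
import net.openhft.chronicle.queue.ExcerptTailer;
import net.openhft.chronicle.queue.impl.single.SingleChronicleQueueBuilder;

public class NamedTailerSketch {
    public static void readAll(String path) {
        try (ChronicleQueue queue = SingleChronicleQueueBuilder.binary(path).build()) {
            // the tailer id plays the role of the INDEX key: the read position is kept per id
            ExcerptTailer tailer = queue.createTailer("dto-reader");
            String json;
            while ((json = tailer.readText()) != null) {
                // deserialize json here...
            }
        } // a later call with the same id resumes after the last message read
    }
}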
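
On writeDocument: as far as I understand, the appender accepts a lambda that writes into a wire, and the tailer reads it back the same way, so the Jackson round trip might not be needed at all. A rough, unverified sketch; the field name "dto" and the MyDto class are just placeholders of mine, and the DTO presumably has to be something Chronicle Wire can marshal:

import net.openhft.chronicle.queue.ChronicleQueue;
import net.openhft.chronicle.queue.ExcerptAppender;
import net.openhft.chronicle.queue.ExcerptTailer;
import net.openhft.chronicle.queue.impl.single.SingleChronicleQueueBuilder;

public class WriteDocumentSketch {

    // placeholder DTO, only for this example
    public static class MyDto {
        String name;
        long value;
    }

    public static void roundTrip(String path) {
        try (ChronicleQueue queue = SingleChronicleQueueBuilder.binary(path).build()) {
            ExcerptAppender appender = queue.acquireAppender();
            MyDto out = new MyDto();
            out.name = "example";
            out.value = 42L;
            // write the DTO as a single document under the field name "dto"
            appender.writeDocument(w -> w.write(() -> "dto").object(out));

            ExcerptTailer tailer = queue.createTailer();
            // readDocument returns false once there is nothing left to read
            tailer.readDocument(w -> {
                MyDto in = w.read(() -> "dto").object(MyDto.class);
                // use "in" here...
            });
        }
    }
}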
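
On finding the last index: moving a throw-away tailer to the end with toEnd() appears to give the index just past the last written excerpt, which could be stored or logged instead of discovering the end by looping. The read loop itself can also drop the explicit break by testing readText() in the condition. Again, only an unverified sketch:

import net.openhft.chronicle.queue.ChronicleQueue;
import net.openhft.chronicle.queue.ExcerptTailer;
import net.openhft.chronicle.queue.impl.single.SingleChronicleQueueBuilder;

public class LastIndexSketch {
    public static void read(String path) {
        try (ChronicleQueue queue = SingleChronicleQueueBuilder.binary(path).build()) {
            // index just past the last written excerpt, i.e. the current end of the queue
            long endIndex = queue.createTailer().toEnd().index();
            System.out.println("end index: " + endIndex);

            // the null check doubles as the loop condition, so no explicit break is needed
            ExcerptTailer tailer = queue.createTailer();
            String json;
            while ((json = tailer.readText()) != null) {
                // process json...
            }
        }
    }
}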