Remove docs for ActorPub/Sub and write persistence query docs as stages (#26844)
* use separate db columns
* Use io dispatcher for sample stage

This commit is contained in:
parent 2bbf13f707
commit c65bf2d276
22 changed files with 272 additions and 2194 deletions
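The "use separate db columns" item above refers to the sample journal table: the rewritten stage selects persistence_id, seq_nr, serializer_id, serializer_manifest and payload as individual columns instead of one serialized persistent_repr blob. Below is a minimal sketch of a table shape that would satisfy that query, assuming an in-memory H2 database is on the classpath; the DDL, column types and JDBC URL are illustrative assumptions, not an official Akka schema.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

public class JournalSchemaSketch {
  public static void main(String[] args) throws Exception {
    // Hypothetical table matching the columns queried by MyEventsByTagSource.
    // Column names come from the sample query; the types are assumptions.
    try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:journal");
        Statement st = conn.createStatement()) {
      st.execute(
          "CREATE TABLE journal ("
              + "id BIGINT AUTO_INCREMENT PRIMARY KEY, " // used as the eventsByTag offset
              + "persistence_id VARCHAR(255) NOT NULL, "
              + "seq_nr BIGINT NOT NULL, "
              + "serializer_id INT NOT NULL, "
              + "serializer_manifest VARCHAR(255), "
              + "payload BLOB NOT NULL, "
              + "tag VARCHAR(255))");
    }
  }
}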
jdocs/persistence/PersistenceQueryDocTest.java

@@ -6,6 +6,7 @@ package jdocs.persistence;

import static akka.pattern.Patterns.ask;

import java.sql.Connection;
import java.time.Duration;
import java.util.HashSet;
import java.util.Set;

@@ -21,9 +22,8 @@ import akka.stream.ActorMaterializer;

import akka.stream.javadsl.Sink;
import akka.stream.javadsl.Source;

import docs.persistence.query.MyEventsByTagPublisher;
import jdocs.persistence.query.MyEventsByTagSource;
import org.reactivestreams.Subscriber;
import scala.concurrent.duration.FiniteDuration;

import java.util.ArrayList;
import java.util.List;

@@ -92,12 +92,11 @@ public class PersistenceQueryDocTest {

        akka.persistence.query.javadsl.PersistenceIdsQuery,
        akka.persistence.query.javadsl.CurrentPersistenceIdsQuery {

    private final FiniteDuration refreshInterval;
    private final Duration refreshInterval;
    private Connection conn;

    public MyJavadslReadJournal(ExtendedActorSystem system, Config config) {
      refreshInterval =
          FiniteDuration.create(
              config.getDuration("refresh-interval", TimeUnit.MILLISECONDS), TimeUnit.MILLISECONDS);
      refreshInterval = config.getDuration("refresh-interval");
    }

    /**

@@ -115,10 +114,8 @@ public class PersistenceQueryDocTest {

    public Source<EventEnvelope, NotUsed> eventsByTag(String tag, Offset offset) {
      if (offset instanceof Sequence) {
        Sequence sequenceOffset = (Sequence) offset;
        final Props props =
            MyEventsByTagPublisher.props(tag, sequenceOffset.value(), refreshInterval);
        return Source.<EventEnvelope>actorPublisher(props)
            .mapMaterializedValue(m -> NotUsed.getInstance());
        return Source.fromGraph(
            new MyEventsByTagSource(conn, tag, sequenceOffset.value(), refreshInterval));
      } else if (offset == NoOffset.getInstance())
        return eventsByTag(tag, Offset.sequence(0L)); // recursive
      else
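For context, a read journal like the one changed above is normally obtained through the PersistenceQuery extension, and the Source it returns is then run with a materializer. A minimal usage sketch; the plugin config path "example.my-read-journal", the tag "green", and registering MyJavadslReadJournal under that path are assumptions for illustration, not values taken from this commit.

import akka.actor.ActorSystem;
import akka.persistence.query.Offset;
import akka.persistence.query.PersistenceQuery;
import akka.stream.ActorMaterializer;
import akka.stream.javadsl.Sink;

public class ReadJournalUsageSketch {
  public static void main(String[] args) {
    ActorSystem system = ActorSystem.create("query-sketch");
    ActorMaterializer materializer = ActorMaterializer.create(system);

    // Look up the read journal implementation by its (assumed) config path.
    MyJavadslReadJournal readJournal =
        PersistenceQuery.get(system)
            .getReadJournalFor(MyJavadslReadJournal.class, "example.my-read-journal");

    // eventsByTag delegates to the stage-based MyEventsByTagSource introduced below.
    readJournal
        .eventsByTag("green", Offset.noOffset())
        .runWith(Sink.foreach(System.out::println), materializer);
  }
}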
jdocs/persistence/query/MyEventsByTagJavaPublisher.java (deleted)

@@ -1,135 +0,0 @@

/*
 * Copyright (C) 2015-2019 Lightbend Inc. <https://www.lightbend.com>
 */

package jdocs.persistence.query;

import akka.actor.Cancellable;
import akka.actor.Scheduler;
import akka.japi.Pair;
import akka.persistence.PersistentRepr;
import akka.persistence.query.Offset;
import akka.serialization.Serialization;
import akka.serialization.SerializationExtension;
import akka.stream.actor.AbstractActorPublisher;

import akka.actor.Props;
import akka.persistence.query.EventEnvelope;
import akka.stream.actor.ActorPublisherMessage.Cancel;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.time.Duration;

import static java.util.stream.Collectors.toList;

// #events-by-tag-publisher
class MyEventsByTagJavaPublisher extends AbstractActorPublisher<EventEnvelope> {
  private final Serialization serialization = SerializationExtension.get(getContext().getSystem());

  private final Connection connection;

  private final String tag;

  private final String CONTINUE = "CONTINUE";
  private final int LIMIT = 1000;
  private long currentOffset;
  private List<EventEnvelope> buf = new LinkedList<>();

  private Cancellable continueTask;

  public MyEventsByTagJavaPublisher(
      Connection connection, String tag, Long offset, Duration refreshInterval) {
    this.connection = connection;
    this.tag = tag;
    this.currentOffset = offset;

    final Scheduler scheduler = getContext().getSystem().scheduler();
    this.continueTask =
        scheduler.schedule(
            refreshInterval,
            refreshInterval,
            getSelf(),
            CONTINUE,
            getContext().getDispatcher(),
            getSelf());
  }

  @Override
  public Receive createReceive() {
    return receiveBuilder()
        .matchEquals(
            CONTINUE,
            (in) -> {
              query();
              deliverBuf();
            })
        .match(
            Cancel.class,
            (in) -> {
              getContext().stop(getSelf());
            })
        .build();
  }

  public static Props props(Connection conn, String tag, Long offset, Duration refreshInterval) {
    return Props.create(
        MyEventsByTagJavaPublisher.class,
        () -> new MyEventsByTagJavaPublisher(conn, tag, offset, refreshInterval));
  }

  @Override
  public void postStop() {
    continueTask.cancel();
  }

  private void query() {
    if (buf.isEmpty()) {
      final String query =
          "SELECT id, persistent_repr "
              + "FROM journal WHERE tag = ? AND id > ? "
              + "ORDER BY id LIMIT ?";

      try (PreparedStatement s = connection.prepareStatement(query)) {
        s.setString(1, tag);
        s.setLong(2, currentOffset);
        s.setLong(3, LIMIT);
        try (ResultSet rs = s.executeQuery()) {

          final List<Pair<Long, byte[]>> res = new ArrayList<>(LIMIT);
          while (rs.next()) res.add(Pair.create(rs.getLong(1), rs.getBytes(2)));

          if (!res.isEmpty()) {
            currentOffset = res.get(res.size() - 1).first();
          }

          buf =
              res.stream()
                  .map(
                      in -> {
                        final Long id = in.first();
                        final byte[] bytes = in.second();

                        final PersistentRepr p =
                            serialization.deserialize(bytes, PersistentRepr.class).get();

                        return new EventEnvelope(
                            Offset.sequence(id), p.persistenceId(), p.sequenceNr(), p.payload());
                      })
                  .collect(toList());
        }
      } catch (Exception e) {
        onErrorThenStop(e);
      }
    }
  }

  private void deliverBuf() {
    while (totalDemand() > 0 && !buf.isEmpty()) onNext(buf.remove(0));
  }
}
// #events-by-tag-publisher
jdocs/persistence/query/MyEventsByTagSource.java (new file)

@@ -0,0 +1,136 @@

/*
 * Copyright (C) 2019 Lightbend Inc. <https://www.lightbend.com>
 */

package jdocs.persistence.query;

import akka.actor.ActorSystem;
import akka.japi.Pair;
import akka.persistence.PersistentRepr;
import akka.persistence.query.EventEnvelope;
import akka.persistence.query.Offset;
import akka.serialization.Serialization;
import akka.serialization.SerializationExtension;
import akka.stream.*;
import akka.stream.stage.*;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.time.Duration;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;

import static java.util.stream.Collectors.toList;

// #events-by-tag-publisher
public class MyEventsByTagSource extends GraphStage<SourceShape<EventEnvelope>> {
  public Outlet<EventEnvelope> out = Outlet.create("MyEventByTagSource.out");
  private static final String QUERY =
      "SELECT id, persistence_id, seq_nr, serializer_id, serializer_manifest, payload "
          + "FROM journal WHERE tag = ? AND id > ? "
          + "ORDER BY id LIMIT ?";

  enum Continue {
    INSTANCE;
  }

  private static final int LIMIT = 1000;
  private final Connection connection;
  private final String tag;
  private final long initialOffset;
  private final Duration refreshInterval;

  // assumes a shared connection, could also be a factory for creating connections/pool
  public MyEventsByTagSource(
      Connection connection, String tag, long initialOffset, Duration refreshInterval) {
    this.connection = connection;
    this.tag = tag;
    this.initialOffset = initialOffset;
    this.refreshInterval = refreshInterval;
  }

  @Override
  public Attributes initialAttributes() {
    return Attributes.apply(ActorAttributes.IODispatcher());
  }

  @Override
  public SourceShape<EventEnvelope> shape() {
    return SourceShape.of(out);
  }

  @Override
  public GraphStageLogic createLogic(Attributes inheritedAttributes) {
    return new TimerGraphStageLogic(shape()) {
      private ActorSystem system = ((ActorMaterializer) materializer()).system();
      private long currentOffset = initialOffset;
      private List<EventEnvelope> buf = new LinkedList<>();
      private final Serialization serialization = SerializationExtension.get(system);

      @Override
      public void preStart() {
        schedulePeriodically(Continue.INSTANCE, refreshInterval);
      }

      @Override
      public void onTimer(Object timerKey) {
        query();
        deliver();
      }

      private void deliver() {
        if (isAvailable(out) && !buf.isEmpty()) {
          push(out, buf.remove(0));
        }
      }

      private void query() {
        if (buf.isEmpty()) {

          try (PreparedStatement s = connection.prepareStatement(QUERY)) {
            s.setString(1, tag);
            s.setLong(2, currentOffset);
            s.setLong(3, LIMIT);
            try (ResultSet rs = s.executeQuery()) {
              final List<EventEnvelope> res = new ArrayList<>(LIMIT);
              while (rs.next()) {
                Object deserialized =
                    serialization
                        .deserialize(
                            rs.getBytes("payload"),
                            rs.getInt("serializer_id"),
                            rs.getString("serializer_manifest"))
                        .get();
                currentOffset = rs.getLong("id");
                res.add(
                    new EventEnvelope(
                        Offset.sequence(currentOffset),
                        rs.getString("persistence_id"),
                        rs.getLong("seq_nr"),
                        deserialized));
              }
              buf = res;
            }
          } catch (Exception e) {
            failStage(e);
          }
        }
      }

      {
        setHandler(
            out,
            new AbstractOutHandler() {
              @Override
              public void onPull() {
                query();
                deliver();
              }
            });
      }
    };
  }
}
// #events-by-tag-publisher
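A minimal sketch of running the new stage directly, assuming a JDBC Connection to a database that contains the sample journal table; the JDBC URL, tag and refresh interval are illustrative assumptions. Because initialAttributes() returns ActorAttributes.IODispatcher(), the blocking JDBC calls should run on the stream's blocking IO dispatcher rather than the default dispatcher.

import akka.NotUsed;
import akka.actor.ActorSystem;
import akka.persistence.query.EventEnvelope;
import akka.stream.ActorMaterializer;
import akka.stream.javadsl.Sink;
import akka.stream.javadsl.Source;

import java.sql.Connection;
import java.sql.DriverManager;
import java.time.Duration;

public class MyEventsByTagSourceUsageSketch {
  public static void main(String[] args) throws Exception {
    ActorSystem system = ActorSystem.create("stage-sketch");
    ActorMaterializer materializer = ActorMaterializer.create(system);

    // Assumed in-memory H2 database holding the sample "journal" table.
    Connection conn = DriverManager.getConnection("jdbc:h2:mem:journal");

    // Wrap the GraphStage in a Source, as MyJavadslReadJournal.eventsByTag does above.
    Source<EventEnvelope, NotUsed> events =
        Source.fromGraph(new MyEventsByTagSource(conn, "green", 0L, Duration.ofSeconds(1)));

    events.runWith(Sink.foreach(System.out::println), materializer);
  }
}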