From 7ce4d90caa1c7a9c09903a8d25bb9ea88a4adfd5 Mon Sep 17 00:00:00 2001 From: Pascal Precht <445106+PascalPrecht@users.noreply.github.com> Date: Mon, 21 Mar 2022 15:18:36 +0100 Subject: [PATCH] Introduce community history archive routine This introduces logic needed to: - Create WakuMessageArchives and and indices from store waku messages - History archive torrent data to disk and create .torrent file from that - Seed and unseed history archive torrents as necessary - Starting/stopping the torrent client - Enabling/disabling community history support for individual components and starting/stopping the routine intervals accordingly This does not yet handle magnet links (#2568) Closes #2567 --- appdatabase/migrations/bindata.go | 48 +- ...5_add_community_archives_info_table.up.sql | 6 + eth-node/types/topic.go | 17 + multiaccounts/accounts/database.go | 6 + protocol/communities/manager.go | 669 +++++++++++++++++- protocol/communities/manager_test.go | 426 ++++++++++- protocol/communities/persistence.go | 129 ++++ protocol/messenger.go | 26 +- protocol/messenger_communities.go | 294 ++++++++ protocol/messenger_config.go | 7 + protocol/messenger_mailserver.go | 23 +- protocol/messenger_mailserver_cycle.go | 16 + .../application_metadata_message.pb.go | 96 +-- .../application_metadata_message.proto | 1 + protocol/protobuf/communities.pb.go | 499 +++++++++++-- protocol/protobuf/communities.proto | 40 ++ services/ext/api.go | 8 + services/ext/signal.go | 28 + signal/events_community_archives.go | 87 +++ 19 files changed, 2273 insertions(+), 153 deletions(-) create mode 100644 appdatabase/migrations/sql/1647957715_add_community_archives_info_table.up.sql create mode 100644 signal/events_community_archives.go diff --git a/appdatabase/migrations/bindata.go b/appdatabase/migrations/bindata.go index 4cda9798731..5afa6660579 100644 --- a/appdatabase/migrations/bindata.go +++ b/appdatabase/migrations/bindata.go @@ -12,6 +12,7 @@ // 1647880168_add_torrent_config.up.sql (211B) // 1647882837_add_communities_settings_table.up.sql (206B) // 1647956635_add_waku_messages_table.up.sql (266B) +// 1647957715_add_community_archives_info_table.up.sql (194B) // doc.go (74B) package migrations @@ -321,6 +322,26 @@ func _1647956635_add_waku_messages_tableUpSql() (*asset, error) { return a, nil } +var __1647957715_add_community_archives_info_tableUpSql = []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x74\x8d\xc1\x6a\xc4\x20\x18\x06\xef\x3e\xc5\x77\x6c\xa1\x87\xbe\x82\xb5\x7f\x20\xd4\x9a\x20\x7f\xa0\x39\x89\x18\x9b\x4a\xa2\x81\xea\x2e\xec\xdb\x2f\x7b\xc9\x6d\xcf\x33\xc3\x28\x4b\x92\x09\x2c\x3f\x34\x21\x1c\x39\x5f\x4a\x6a\x29\x56\xe7\xff\xc3\x5f\xba\x46\x97\xca\xef\x81\x17\x81\x13\xde\x5c\x5a\xc0\xf4\xc3\x18\x6d\xff\x2d\xed\x8c\x2f\x9a\x31\x18\xa8\xc1\x74\xba\x57\x0c\x4b\xa3\x96\x8a\xde\x04\x90\xfd\x5a\x62\xdb\x53\xd9\x5c\xd8\x8f\xb0\xa1\x37\x0c\x33\x30\xcc\xa4\x35\x3e\xa9\x93\x93\x66\xbc\x3f\xd4\xdd\xd7\xe6\x72\xac\xd5\xaf\xf1\xbc\xc7\xb2\xb8\xc5\xb7\xf8\xa4\x13\xaf\x42\xdc\x03\x00\x00\xff\xff\x2d\x4e\x11\xe6\xc2\x00\x00\x00") + +func _1647957715_add_community_archives_info_tableUpSqlBytes() ([]byte, error) { + return bindataRead( + __1647957715_add_community_archives_info_tableUpSql, + "1647957715_add_community_archives_info_table.up.sql", + ) +} + +func _1647957715_add_community_archives_info_tableUpSql() (*asset, error) { + bytes, err := _1647957715_add_community_archives_info_tableUpSqlBytes() + if err != nil { + return nil, err + } + + info := bindataFileInfo{name: 
"1647957715_add_community_archives_info_table.up.sql", size: 194, mode: os.FileMode(0664), modTime: time.Unix(1648217692, 0)} + a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x8a, 0xe9, 0xa9, 0x6c, 0xf2, 0xb5, 0xfe, 0x39, 0x66, 0x16, 0x44, 0xd6, 0x41, 0x60, 0x2d, 0x1a, 0x4, 0x3d, 0xf3, 0x3e, 0xb3, 0x14, 0xd0, 0xe5, 0x31, 0xc6, 0x99, 0x48, 0xf5, 0xc4, 0x76, 0xa}} + return a, nil +} + var _docGo = []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x2c\xc9\xb1\x0d\xc4\x20\x0c\x05\xd0\x9e\x29\xfe\x02\xd8\xfd\x6d\xe3\x4b\xac\x2f\x44\x82\x09\x78\x7f\xa5\x49\xfd\xa6\x1d\xdd\xe8\xd8\xcf\x55\x8a\x2a\xe3\x47\x1f\xbe\x2c\x1d\x8c\xfa\x6f\xe3\xb4\x34\xd4\xd9\x89\xbb\x71\x59\xb6\x18\x1b\x35\x20\xa2\x9f\x0a\x03\xa2\xe5\x0d\x00\x00\xff\xff\x60\xcd\x06\xbe\x4a\x00\x00\x00") func docGoBytes() ([]byte, error) { @@ -456,6 +477,8 @@ var _bindata = map[string]func() (*asset, error){ "1647956635_add_waku_messages_table.up.sql": _1647956635_add_waku_messages_tableUpSql, + "1647957715_add_community_archives_info_table.up.sql": _1647957715_add_community_archives_info_tableUpSql, + "doc.go": docGo, } @@ -500,18 +523,19 @@ type bintree struct { } var _bintree = &bintree{nil, map[string]*bintree{ - "1640111208_dummy.up.sql": &bintree{_1640111208_dummyUpSql, map[string]*bintree{}}, - "1642666031_add_removed_clock_to_bookmarks.up.sql": &bintree{_1642666031_add_removed_clock_to_bookmarksUpSql, map[string]*bintree{}}, - "1643644541_gif_api_key_setting.up.sql": &bintree{_1643644541_gif_api_key_settingUpSql, map[string]*bintree{}}, - "1644188994_recent_stickers.up.sql": &bintree{_1644188994_recent_stickersUpSql, map[string]*bintree{}}, - "1646659233_add_address_to_dapp_permisssion.up.sql": &bintree{_1646659233_add_address_to_dapp_permisssionUpSql, map[string]*bintree{}}, - "1646841105_add_emoji_account.up.sql": &bintree{_1646841105_add_emoji_accountUpSql, map[string]*bintree{}}, - "1647278782_display_name.up.sql": &bintree{_1647278782_display_nameUpSql, map[string]*bintree{}}, - "1647862838_reset_last_backup.up.sql": &bintree{_1647862838_reset_last_backupUpSql, map[string]*bintree{}}, - "1647871652_add_settings_sync_clock_table.up.sql": &bintree{_1647871652_add_settings_sync_clock_tableUpSql, map[string]*bintree{}}, - "1647880168_add_torrent_config.up.sql": &bintree{_1647880168_add_torrent_configUpSql, map[string]*bintree{}}, - "1647882837_add_communities_settings_table.up.sql": &bintree{_1647882837_add_communities_settings_tableUpSql, map[string]*bintree{}}, - "1647956635_add_waku_messages_table.up.sql": &bintree{_1647956635_add_waku_messages_tableUpSql, map[string]*bintree{}}, + "1640111208_dummy.up.sql": &bintree{_1640111208_dummyUpSql, map[string]*bintree{}}, + "1642666031_add_removed_clock_to_bookmarks.up.sql": &bintree{_1642666031_add_removed_clock_to_bookmarksUpSql, map[string]*bintree{}}, + "1643644541_gif_api_key_setting.up.sql": &bintree{_1643644541_gif_api_key_settingUpSql, map[string]*bintree{}}, + "1644188994_recent_stickers.up.sql": &bintree{_1644188994_recent_stickersUpSql, map[string]*bintree{}}, + "1646659233_add_address_to_dapp_permisssion.up.sql": &bintree{_1646659233_add_address_to_dapp_permisssionUpSql, map[string]*bintree{}}, + "1646841105_add_emoji_account.up.sql": &bintree{_1646841105_add_emoji_accountUpSql, map[string]*bintree{}}, + "1647278782_display_name.up.sql": &bintree{_1647278782_display_nameUpSql, map[string]*bintree{}}, + "1647862838_reset_last_backup.up.sql": &bintree{_1647862838_reset_last_backupUpSql, map[string]*bintree{}}, + "1647871652_add_settings_sync_clock_table.up.sql": 
&bintree{_1647871652_add_settings_sync_clock_tableUpSql, map[string]*bintree{}}, + "1647880168_add_torrent_config.up.sql": &bintree{_1647880168_add_torrent_configUpSql, map[string]*bintree{}}, + "1647882837_add_communities_settings_table.up.sql": &bintree{_1647882837_add_communities_settings_tableUpSql, map[string]*bintree{}}, + "1647956635_add_waku_messages_table.up.sql": &bintree{_1647956635_add_waku_messages_tableUpSql, map[string]*bintree{}}, + "1647957715_add_community_archives_info_table.up.sql": &bintree{_1647957715_add_community_archives_info_tableUpSql, map[string]*bintree{}}, "doc.go": &bintree{docGo, map[string]*bintree{}}, }} diff --git a/appdatabase/migrations/sql/1647957715_add_community_archives_info_table.up.sql b/appdatabase/migrations/sql/1647957715_add_community_archives_info_table.up.sql new file mode 100644 index 00000000000..c8cc82481b2 --- /dev/null +++ b/appdatabase/migrations/sql/1647957715_add_community_archives_info_table.up.sql @@ -0,0 +1,6 @@ +CREATE TABLE communities_archive_info ( + community_id TEXT PRIMARY KEY ON CONFLICT REPLACE, + magnetlink_clock INT NOT NULL DEFAULT 0, + last_message_archive_end_date INT NOT NULL DEFAULT 0 +) + diff --git a/eth-node/types/topic.go b/eth-node/types/topic.go index 0b1cbed9301..218fc443c38 100644 --- a/eth-node/types/topic.go +++ b/eth-node/types/topic.go @@ -1,5 +1,9 @@ package types +import ( + "github.com/ethereum/go-ethereum/common/hexutil" +) + const ( // TopicLength is the expected length of the topic, in bytes TopicLength = 4 @@ -84,3 +88,16 @@ func MakeFullNodeBloom() []byte { } return bloom } + +func StringToTopic(s string) (t TopicType) { + str, _ := hexutil.Decode(s) + return BytesToTopic(str) +} + +func TopicTypeToByteArray(t TopicType) []byte { + topic := make([]byte, 4) + for i, b := range t { + topic[i] = b + } + return topic +} diff --git a/multiaccounts/accounts/database.go b/multiaccounts/accounts/database.go index d147e4d9bf6..7facf5d5fc4 100644 --- a/multiaccounts/accounts/database.go +++ b/multiaccounts/accounts/database.go @@ -6,6 +6,8 @@ import ( "github.com/status-im/status-go/eth-node/types" "github.com/status-im/status-go/multiaccounts/errors" "github.com/status-im/status-go/multiaccounts/settings" + "github.com/status-im/status-go/nodecfg" + "github.com/status-im/status-go/params" ) const ( @@ -214,3 +216,7 @@ func (db *Database) AddressExists(address types.Address) (exists bool, err error err = db.db.QueryRow("SELECT EXISTS (SELECT 1 FROM accounts WHERE address = ?)", address).Scan(&exists) return exists, err } + +func (db *Database) GetNodeConfig() (*params.NodeConfig, error) { + return nodecfg.GetNodeConfig(db.db) +} diff --git a/protocol/communities/manager.go b/protocol/communities/manager.go index b88181dd824..c5459e69aba 100644 --- a/protocol/communities/manager.go +++ b/protocol/communities/manager.go @@ -4,12 +4,15 @@ import ( "crypto/ecdsa" "database/sql" "fmt" + "log" + "os" "strings" + "sync" "time" - _ "github.com/anacrolix/torrent" - _ "github.com/anacrolix/torrent/bencode" - _ "github.com/anacrolix/torrent/metainfo" + "github.com/anacrolix/torrent" + "github.com/anacrolix/torrent/bencode" + "github.com/anacrolix/torrent/metainfo" "github.com/golang/protobuf/proto" "github.com/google/uuid" @@ -18,23 +21,38 @@ import ( "github.com/status-im/status-go/eth-node/crypto" "github.com/status-im/status-go/eth-node/types" + "github.com/status-im/status-go/params" "github.com/status-im/status-go/protocol/common" "github.com/status-im/status-go/protocol/ens" 
"github.com/status-im/status-go/protocol/protobuf" "github.com/status-im/status-go/protocol/requests" + "github.com/status-im/status-go/protocol/transport" + "github.com/status-im/status-go/signal" ) +var defaultAnnounceList = [][]string{ + {"udp://tracker.opentrackr.org:1337/announce"}, + {"udp://tracker.openbittorrent.com:6969/announce"}, +} +var pieceLength = 100 * 1024 + type Manager struct { - persistence *Persistence - ensSubscription chan []*ens.VerificationRecord - subscriptions []chan *Subscription - ensVerifier *ens.Verifier - identity *ecdsa.PublicKey - logger *zap.Logger - quit chan struct{} + persistence *Persistence + ensSubscription chan []*ens.VerificationRecord + subscriptions []chan *Subscription + ensVerifier *ens.Verifier + identity *ecdsa.PublicKey + logger *zap.Logger + transport *transport.Transport + quit chan struct{} + torrentConfig *params.TorrentConfig + torrentClient *torrent.Client + historyArchiveTasksWaitGroup sync.WaitGroup + historyArchiveTasks map[string]chan struct{} + torrentTasks map[string]metainfo.Hash } -func NewManager(identity *ecdsa.PublicKey, db *sql.DB, logger *zap.Logger, verifier *ens.Verifier) (*Manager, error) { +func NewManager(identity *ecdsa.PublicKey, db *sql.DB, logger *zap.Logger, verifier *ens.Verifier, transport *transport.Transport, torrentConfig *params.TorrentConfig) (*Manager, error) { if identity == nil { return nil, errors.New("empty identity") } @@ -47,9 +65,13 @@ func NewManager(identity *ecdsa.PublicKey, db *sql.DB, logger *zap.Logger, verif } manager := &Manager{ - logger: logger, - identity: identity, - quit: make(chan struct{}), + logger: logger, + identity: identity, + quit: make(chan struct{}), + transport: transport, + torrentConfig: torrentConfig, + historyArchiveTasks: make(map[string]chan struct{}), + torrentTasks: make(map[string]metainfo.Hash), persistence: &Persistence{ logger: logger, db: db, @@ -67,8 +89,13 @@ func NewManager(identity *ecdsa.PublicKey, db *sql.DB, logger *zap.Logger, verif } type Subscription struct { - Community *Community - Invitations []*protobuf.CommunityInvitation + Community *Community + Invitations []*protobuf.CommunityInvitation + CreatingHistoryArchivesSignal *signal.CreatingHistoryArchivesSignal + HistoryArchivesCreatedSignal *signal.HistoryArchivesCreatedSignal + NoHistoryArchivesCreatedSignal *signal.NoHistoryArchivesCreatedSignal + HistoryArchivesSeedingSignal *signal.HistoryArchivesSeedingSignal + HistoryArchivesUnseededSignal *signal.HistoryArchivesUnseededSignal } type CommunityResponse struct { @@ -86,6 +113,12 @@ func (m *Manager) Start() error { if m.ensVerifier != nil { m.runENSVerificationLoop() } + + if m.torrentConfig != nil && m.torrentConfig.Enabled { + err := m.StartTorrentClient() + return err + } + return nil } @@ -113,9 +146,60 @@ func (m *Manager) Stop() error { for _, c := range m.subscriptions { close(c) } + m.StopTorrentClient() + return nil +} + +func (m *Manager) SetTorrentConfig(config *params.TorrentConfig) { + m.torrentConfig = config +} + +func (m *Manager) StartTorrentClient() error { + if m.TorrentClientStarted() { + return nil + } + + config := torrent.NewDefaultClientConfig() + config.SetListenAddr(":" + fmt.Sprint(m.torrentConfig.Port)) + config.Seed = true + + config.DataDir = m.torrentConfig.DataDir + + if _, err := os.Stat(m.torrentConfig.DataDir); os.IsNotExist(err) { + err := os.MkdirAll(m.torrentConfig.DataDir, 0700) + if err != nil { + return err + } + } + + log.Println("Starting torrent client at port: ", m.torrentConfig.Port) + // 
Instantiating the client will make it bootstrap and listen eagerly, + // so no go routine is needed here + client, err := torrent.NewClient(config) + if err != nil { + return err + } + m.torrentClient = client return nil } +func (m *Manager) StopTorrentClient() []error { + if m.TorrentClientStarted() { + m.StopHistoryArchiveTasksIntervals() + log.Println("Stopping torrent client") + errs := m.torrentClient.Close() + if len(errs) > 0 { + return errs + } + m.torrentClient = nil + } + return make([]error, 0) +} + +func (m *Manager) TorrentClientStarted() bool { + return m.torrentClient != nil +} + func (m *Manager) publish(subscription *Subscription) { for _, s := range m.subscriptions { select { @@ -721,6 +805,23 @@ func (m *Manager) JoinCommunity(id types.HexBytes) (*Community, error) { return community, nil } +func (m *Manager) GetMagnetlinkMessageClock(communityID types.HexBytes) (uint64, error) { + return m.persistence.GetMagnetlinkMessageClock(communityID) +} + +func (m *Manager) UpdateCommunityDescriptionMagnetlinkMessageClock(communityID types.HexBytes, clock uint64) error { + community, err := m.GetByIDString(communityID.String()) + if err != nil { + return err + } + community.config.CommunityDescription.MagnetlinkClock = clock + return m.persistence.SaveCommunity(community) +} + +func (m *Manager) UpdateMagnetlinkMessageClock(communityID types.HexBytes, clock uint64) error { + return m.persistence.UpdateMagnetlinkMessageClock(communityID, clock) +} + func (m *Manager) LeaveCommunity(id types.HexBytes) (*Community, error) { community, err := m.GetByID(id) if err != nil { @@ -961,6 +1062,542 @@ func (m *Manager) GetAdminCommunitiesChatIDs() (map[string]bool, error) { return chatIDs, nil } +func (m *Manager) GetCommunityChatsFilters(communityID types.HexBytes) ([]*transport.Filter, error) { + chatIDs, err := m.persistence.GetCommunityChatIDs(communityID) + if err != nil { + return nil, err + } + + filters := []*transport.Filter{} + for _, cid := range chatIDs { + filters = append(filters, m.transport.FilterByChatID(cid)) + } + return filters, nil +} + +func (m *Manager) GetCommunityChatsTopics(communityID types.HexBytes) ([]types.TopicType, error) { + filters, err := m.GetCommunityChatsFilters(communityID) + if err != nil { + return nil, err + } + + topics := []types.TopicType{} + for _, filter := range filters { + topics = append(topics, filter.Topic) + } + + return topics, nil +} + func (m *Manager) StoreWakuMessage(message *types.Message) error { return m.persistence.SaveWakuMessage(message) } + +func (m *Manager) GetLatestWakuMessageTimestamp(topics []types.TopicType) (uint64, error) { + return m.persistence.GetLatestWakuMessageTimestamp(topics) +} + +func (m *Manager) GetOldestWakuMessageTimestamp(topics []types.TopicType) (uint64, error) { + return m.persistence.GetOldestWakuMessageTimestamp(topics) +} + +func (m *Manager) GetLastMessageArchiveEndDate(communityID types.HexBytes) (uint64, error) { + return m.persistence.GetLastMessageArchiveEndDate(communityID) +} + +func (m *Manager) GetHistoryArchivePartitionStartTimestamp(communityID types.HexBytes) (uint64, error) { + filters, err := m.GetCommunityChatsFilters(communityID) + if err != nil { + m.logger.Warn("failed to get community chats filters", zap.Error(err)) + return 0, err + } + + if len(filters) == 0 { + // If we don't have chat filters, we likely don't have any chats + // associated to this community, which means there's nothing more + // to do here + return 0, nil + } + + topics := []types.TopicType{} + + for _, filter 
:= range filters { + topics = append(topics, filter.Topic) + } + + lastArchiveEndDateTimestamp, err := m.GetLastMessageArchiveEndDate(communityID) + if err != nil { + m.logger.Debug("failed to get last archive end date", zap.Error(err)) + return 0, err + } + + if lastArchiveEndDateTimestamp == 0 { + // If we don't have a tracked last message archive end date, it + // means we haven't created an archive before, which means + // the next thing to look at is the oldest waku message timestamp for + // this community + lastArchiveEndDateTimestamp, err = m.GetOldestWakuMessageTimestamp(topics) + if err != nil { + m.logger.Warn("failed to get oldest waku message timestamp", zap.Error(err)) + return 0, err + } + if lastArchiveEndDateTimestamp == 0 { + // This means there's no waku message stored for this community so far + // (even after requesting possibly missed messages), so no messages exist yet that can be archived + return 0, nil + } + } + + return lastArchiveEndDateTimestamp, nil +} + +func (m *Manager) CreateAndSeedHistoryArchive(communityID types.HexBytes, topics []types.TopicType, startDate time.Time, endDate time.Time, partition time.Duration) error { + m.UnseedHistoryArchiveTorrent(communityID) + err := m.CreateHistoryArchiveTorrent(communityID, topics, startDate, endDate, partition) + if err != nil { + return err + } + return m.SeedHistoryArchiveTorrent(communityID) +} + +func (m *Manager) StartHistoryArchiveTasksInterval(community *Community, interval time.Duration) { + id := community.IDString() + _, exists := m.historyArchiveTasks[id] + + if exists { + log.Println("History archive tasks interval already runs for community: ", id) + m.logger.Debug("History archive tasks interval already runs for community: ", zap.Any("id", id)) + return + } + + cancel := make(chan struct{}) + m.historyArchiveTasks[id] = cancel + m.historyArchiveTasksWaitGroup.Add(1) + + ticker := time.NewTicker(interval) + defer ticker.Stop() + + log.Println("Starting history archive tasks interval for community: ", id) + m.logger.Debug("Starting history archive tasks interval for community: ", zap.Any("id", id)) + for { + select { + case <-ticker.C: + log.Println("Executing history archive tasks for community: ", id) + lastArchiveEndDateTimestamp, err := m.GetHistoryArchivePartitionStartTimestamp(community.ID()) + if err != nil { + m.logger.Debug("failed to get last archive end date", zap.Error(err)) + continue + } + + if lastArchiveEndDateTimestamp == 0 { + // This means there are no waku messages for this community, + // so nothing to do here + continue + } + + topics, err := m.GetCommunityChatsTopics(community.ID()) + if err != nil { + m.logger.Debug("failed to get community chats topics", zap.Error(err)) + continue + } + + ts := time.Now().Unix() + to := time.Unix(ts, 0) + lastArchiveEndDate := time.Unix(int64(lastArchiveEndDateTimestamp), 0) + + err = m.CreateAndSeedHistoryArchive(community.ID(), topics, lastArchiveEndDate, to, interval) + if err != nil { + m.logger.Debug("failed to create and seed history archive", zap.Error(err)) + continue + } + case <-cancel: + m.UnseedHistoryArchiveTorrent(community.ID()) + delete(m.historyArchiveTasks, id) + m.historyArchiveTasksWaitGroup.Done() + return + } + } +} + +func (m *Manager) StopHistoryArchiveTasksIntervals() { + for _, t := range m.historyArchiveTasks { + close(t) + } + // Stoping archive interval tasks is async, so we need + // to wait for all of them to be closed before we shutdown + // the torrent client + m.historyArchiveTasksWaitGroup.Wait() +} + +func 
(m *Manager) StopHistoryArchiveTasksInterval(communityID types.HexBytes) { + task, ok := m.historyArchiveTasks[communityID.String()] + if ok { + log.Println("Stopping history archive tasks interval for community: ", communityID.String()) + close(task) + } +} + +type EncodedArchiveData struct { + padding int + bytes []byte +} + +func (m *Manager) CreateHistoryArchiveTorrent(communityID types.HexBytes, topics []types.TopicType, startDate time.Time, endDate time.Time, partition time.Duration) error { + + from := startDate + to := from.Add(partition) + if to.After(endDate) { + to = endDate + } + + archiveDir := m.torrentConfig.DataDir + "/" + communityID.String() + torrentDir := m.torrentConfig.TorrentDir + indexPath := archiveDir + "/index" + dataPath := archiveDir + "/data" + + wakuMessageArchiveIndexProto := &protobuf.WakuMessageArchiveIndex{} + wakuMessageArchiveIndex := make(map[string]*protobuf.WakuMessageArchiveIndexMetadata) + + if _, err := os.Stat(archiveDir); os.IsNotExist(err) { + err := os.MkdirAll(archiveDir, 0700) + if err != nil { + return err + } + } + if _, err := os.Stat(torrentDir); os.IsNotExist(err) { + err := os.MkdirAll(torrentDir, 0700) + if err != nil { + return err + } + } + + _, err := os.Stat(indexPath) + if err == nil { + wakuMessageArchiveIndexProto, err = m.loadHistoryArchiveIndexFromFile(communityID) + if err != nil { + return err + } + } + + var offset uint64 = 0 + + for hash, metadata := range wakuMessageArchiveIndexProto.Archives { + offset = offset + metadata.Size + wakuMessageArchiveIndex[hash] = metadata + } + + var encodedArchives []*EncodedArchiveData + topicsAsByteArrays := topicsAsByteArrays(topics) + + m.publish(&Subscription{CreatingHistoryArchivesSignal: &signal.CreatingHistoryArchivesSignal{ + CommunityID: communityID.String(), + }}) + + log.Println("Creating archives for start date: ", startDate, " till: ", endDate, " partitioned by ", partition) + for { + if from.Equal(endDate) || from.After(endDate) { + break + } + log.Println("Creating message archive for partition (", partition, "). 
From: ", from, " To: ", to) + messages, err := m.persistence.GetWakuMessagesByFilterTopic(topics, uint64(from.Unix()), uint64(to.Unix())) + if err != nil { + return err + } + + if len(messages) == 0 { + // No need to create an archive with zero messages + log.Println("No messages in this partition") + from = to + to = to.Add(partition) + if to.After(endDate) { + to = endDate + } + continue + } + + wakuMessageArchive := m.createWakuMessageArchive(from, to, messages, topicsAsByteArrays) + encodedArchive, err := proto.Marshal(wakuMessageArchive) + if err != nil { + return err + } + + rawSize := len(encodedArchive) + padding := 0 + size := 0 + + if rawSize > pieceLength { + size = rawSize + pieceLength - (rawSize % pieceLength) + padding = size - rawSize + } else { + padding = pieceLength - rawSize + size = rawSize + padding + } + + wakuMessageArchiveIndexMetadata := &protobuf.WakuMessageArchiveIndexMetadata{ + Metadata: wakuMessageArchive.Metadata, + Offset: offset, + Size: uint64(size), + Padding: uint64(padding), + } + + wakuMessageArchiveIndexMetadataBytes, err := proto.Marshal(wakuMessageArchiveIndexMetadata) + if err != nil { + return err + } + + wakuMessageArchiveIndex[crypto.Keccak256Hash(wakuMessageArchiveIndexMetadataBytes).String()] = wakuMessageArchiveIndexMetadata + encodedArchives = append(encodedArchives, &EncodedArchiveData{bytes: encodedArchive, padding: padding}) + from = to + to = to.Add(partition) + if to.After(endDate) { + to = endDate + } + offset = offset + uint64(rawSize) + uint64(padding) + } + + if len(encodedArchives) > 0 { + + dataBytes := make([]byte, 0) + if _, err := os.Stat(dataPath); err == nil { + dataBytes, err = os.ReadFile(dataPath) + if err != nil { + return err + } + } + + for _, encodedArchiveData := range encodedArchives { + dataBytes = append(dataBytes, encodedArchiveData.bytes...) + dataBytes = append(dataBytes, make([]byte, encodedArchiveData.padding)...) 
+ } + + wakuMessageArchiveIndexProto.Archives = wakuMessageArchiveIndex + indexBytes, err := proto.Marshal(wakuMessageArchiveIndexProto) + if err != nil { + return err + } + + err = os.WriteFile(indexPath, indexBytes, 0644) // nolint: gosec + if err != nil { + return err + } + + err = os.WriteFile(dataPath, dataBytes, 0644) // nolint: gosec + if err != nil { + return err + } + + metaInfo := metainfo.MetaInfo{ + AnnounceList: defaultAnnounceList, + } + metaInfo.SetDefaults() + metaInfo.CreatedBy = common.PubkeyToHex(m.identity) + + info := metainfo.Info{ + PieceLength: int64(pieceLength), + } + + err = info.BuildFromFilePath(archiveDir) + if err != nil { + return err + } + + metaInfo.InfoBytes, err = bencode.Marshal(info) + if err != nil { + return err + } + + metaInfoBytes, err := bencode.Marshal(metaInfo) + if err != nil { + return err + } + + err = os.WriteFile(m.torrentFile(communityID.String()), metaInfoBytes, 0644) // nolint: gosec + if err != nil { + return err + } + + m.publish(&Subscription{ + HistoryArchivesCreatedSignal: &signal.HistoryArchivesCreatedSignal{ + CommunityID: communityID.String(), + From: int(startDate.Unix()), + To: int(endDate.Unix()), + }, + }) + } else { + log.Println("No archives created") + m.publish(&Subscription{ + NoHistoryArchivesCreatedSignal: &signal.NoHistoryArchivesCreatedSignal{ + CommunityID: communityID.String(), + From: int(startDate.Unix()), + To: int(endDate.Unix()), + }, + }) + } + + lastMessageArchiveEndDate, err := m.persistence.GetLastMessageArchiveEndDate(communityID) + if err != nil { + return err + } + + if lastMessageArchiveEndDate > 0 { + err = m.persistence.UpdateLastMessageArchiveEndDate(communityID, uint64(from.Unix())) + } else { + err = m.persistence.SaveLastMessageArchiveEndDate(communityID, uint64(from.Unix())) + } + if err != nil { + return err + } + + log.Println("History archive created/updated for community: ", communityID.String()) + return nil +} + +func (m *Manager) SeedHistoryArchiveTorrent(communityID types.HexBytes) error { + m.UnseedHistoryArchiveTorrent(communityID) + + id := communityID.String() + torrentFile := m.torrentFile(id) + + metaInfo, err := metainfo.LoadFromFile(torrentFile) + if err != nil { + return err + } + + info, err := metaInfo.UnmarshalInfo() + if err != nil { + return err + } + + hash := metaInfo.HashInfoBytes() + m.torrentTasks[id] = hash + + if err != nil { + return err + } + + torrent, err := m.torrentClient.AddTorrent(metaInfo) + if err != nil { + return err + } + torrent.DownloadAll() + + m.publish(&Subscription{ + HistoryArchivesSeedingSignal: &signal.HistoryArchivesSeedingSignal{ + CommunityID: communityID.String(), + }, + }) + log.Println("Seeding torrent for community: ", id) + log.Println("Magnetlink: ", metaInfo.Magnet(nil, &info).String()) + return nil +} + +func (m *Manager) UnseedHistoryArchiveTorrent(communityID types.HexBytes) { + id := communityID.String() + hash, exists := m.torrentTasks[id] + + if exists { + torrent, ok := m.torrentClient.Torrent(hash) + if ok { + log.Println("Unseeding and dropping torrent for community: ", id) + m.logger.Debug("Unseeding and dropping torrent for community: ", zap.Any("id", id)) + torrent.Drop() + delete(m.torrentTasks, id) + + m.publish(&Subscription{ + HistoryArchivesUnseededSignal: &signal.HistoryArchivesUnseededSignal{ + CommunityID: id, + }, + }) + } + } +} + +func (m *Manager) IsSeedingHistoryArchiveTorrent(communityID types.HexBytes) bool { + id := communityID.String() + hash := m.torrentTasks[id] + torrent, ok := 
m.torrentClient.Torrent(hash) + return ok && torrent.Seeding() +} + +func (m *Manager) GetHistoryArchiveMagnetlink(communityID types.HexBytes) (string, error) { + id := communityID.String() + torrentFile := m.torrentFile(id) + + metaInfo, err := metainfo.LoadFromFile(torrentFile) + if err != nil { + return "", err + } + + info, err := metaInfo.UnmarshalInfo() + if err != nil { + return "", err + } + + return metaInfo.Magnet(nil, &info).String(), nil +} + +func (m *Manager) createWakuMessageArchive(from time.Time, to time.Time, messages []types.Message, topics [][]byte) *protobuf.WakuMessageArchive { + var wakuMessages []*protobuf.WakuMessage + + for _, msg := range messages { + topic := types.TopicTypeToByteArray(msg.Topic) + wakuMessage := &protobuf.WakuMessage{ + Sig: msg.Sig, + Timestamp: uint64(msg.Timestamp), + Topic: topic, + Payload: msg.Payload, + Padding: msg.Padding, + Hash: msg.Hash, + } + wakuMessages = append(wakuMessages, wakuMessage) + } + + metadata := protobuf.WakuMessageArchiveMetadata{ + From: uint64(from.Unix()), + To: uint64(to.Unix()), + ContentTopic: topics, + } + + wakuMessageArchive := &protobuf.WakuMessageArchive{ + Metadata: &metadata, + Messages: wakuMessages, + } + return wakuMessageArchive +} + +func (m *Manager) loadHistoryArchiveIndexFromFile(communityID types.HexBytes) (*protobuf.WakuMessageArchiveIndex, error) { + wakuMessageArchiveIndexProto := &protobuf.WakuMessageArchiveIndex{} + + indexPath := m.archiveIndexFile(communityID.String()) + indexData, err := os.ReadFile(indexPath) + if err != nil { + return nil, err + } + + err = proto.Unmarshal(indexData, wakuMessageArchiveIndexProto) + if err != nil { + return nil, err + } + return wakuMessageArchiveIndexProto, nil +} + +func (m *Manager) torrentFile(communityID string) string { + return m.torrentConfig.TorrentDir + "/" + communityID + ".torrent" +} + +func (m *Manager) archiveIndexFile(communityID string) string { + return m.torrentConfig.DataDir + "/" + communityID + "/index" +} + +func (m *Manager) archiveDataFile(communityID string) string { + return m.torrentConfig.DataDir + "/" + communityID + "/data" +} + +func topicsAsByteArrays(topics []types.TopicType) [][]byte { + var topicsAsByteArrays [][]byte + for _, t := range topics { + topic := types.TopicTypeToByteArray(t) + topicsAsByteArrays = append(topicsAsByteArrays, topic) + } + return topicsAsByteArrays +} diff --git a/protocol/communities/manager_test.go b/protocol/communities/manager_test.go index 92aec790de3..8c0ddcea5ad 100644 --- a/protocol/communities/manager_test.go +++ b/protocol/communities/manager_test.go @@ -2,9 +2,16 @@ package communities import ( "bytes" + "io/ioutil" + "os" "testing" + "time" + "github.com/status-im/status-go/appdatabase" + "github.com/status-im/status-go/eth-node/types" + "github.com/status-im/status-go/params" "github.com/status-im/status-go/protocol/requests" + "github.com/status-im/status-go/protocol/transport" "github.com/golang/protobuf/proto" _ "github.com/mutecomm/go-sqlcipher" // require go-sqlcipher that overrides default implementation @@ -26,12 +33,17 @@ type ManagerSuite struct { } func (s *ManagerSuite) SetupTest() { - db, err := sqlite.OpenInMemory() - s.Require().NoError(err) + dbPath, err := ioutil.TempFile("", "") + s.NoError(err, "creating temp file for db") + db, err := appdatabase.InitializeDB(dbPath.Name(), "") + s.NoError(err, "creating sqlite db instance") + err = sqlite.Migrate(db) + s.NoError(err, "protocol migrate") + key, err := crypto.GenerateKey() s.Require().NoError(err) 
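+	// NewManager now takes a transport and a torrent config as additional arguments;
+	// these tests pass nil for both and inject a torrent config later via
+	// SetTorrentConfig in the torrent-related tests.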
s.Require().NoError(err) - m, err := NewManager(&key.PublicKey, db, nil, nil) + m, err := NewManager(&key.PublicKey, db, nil, nil, nil, nil) s.Require().NoError(err) s.Require().NoError(m.Start()) s.manager = m @@ -107,16 +119,400 @@ func (s *ManagerSuite) TestEditCommunity() { func (s *ManagerSuite) TestGetAdminCommuniesChatIDs() { + community, _, err := s.buildCommunityWithChat() + s.Require().NoError(err) + s.Require().NotNil(community) + + adminChatIDs, err := s.manager.GetAdminCommunitiesChatIDs() + s.Require().NoError(err) + s.Require().Len(adminChatIDs, 1) +} + +func (s *ManagerSuite) TestStartAndStopTorrentClient() { + torrentConfig := buildTorrentConfig() + s.manager.SetTorrentConfig(&torrentConfig) + + err := s.manager.StartTorrentClient() + s.Require().NoError(err) + s.Require().NotNil(s.manager.torrentClient) + defer s.manager.StopTorrentClient() + + _, err = os.Stat(torrentConfig.DataDir) + s.Require().NoError(err) + s.Require().Equal(s.manager.TorrentClientStarted(), true) +} + +func (s *ManagerSuite) TestStartHistoryArchiveTasksInterval() { + + torrentConfig := buildTorrentConfig() + s.manager.SetTorrentConfig(&torrentConfig) + + err := s.manager.StartTorrentClient() + s.Require().NoError(err) + defer s.manager.StopTorrentClient() + + community, _, err := s.buildCommunityWithChat() + s.Require().NoError(err) + + interval := 10 * time.Second + go s.manager.StartHistoryArchiveTasksInterval(community, interval) + // Due to async exec we need to wait a bit until we check + // the task count. + time.Sleep(5 * time.Second) + s.Require().Len(s.manager.historyArchiveTasks, 1) + + // We wait another 5 seconds to ensure the first tick has kicked in + time.Sleep(5 * time.Second) + + _, err = os.Stat(s.manager.torrentFile(community.IDString())) + s.Require().Error(err) + + s.manager.StopHistoryArchiveTasksInterval(community.ID()) + s.manager.historyArchiveTasksWaitGroup.Wait() + s.Require().Len(s.manager.historyArchiveTasks, 0) +} + +func (s *ManagerSuite) TestStopHistoryArchiveTasksIntervals() { + + torrentConfig := buildTorrentConfig() + s.manager.SetTorrentConfig(&torrentConfig) + + err := s.manager.StartTorrentClient() + s.Require().NoError(err) + defer s.manager.StopTorrentClient() + + community, _, err := s.buildCommunityWithChat() + s.Require().NoError(err) + + interval := 10 * time.Second + go s.manager.StartHistoryArchiveTasksInterval(community, interval) + + time.Sleep(2 * time.Second) + s.Require().Len(s.manager.historyArchiveTasks, 1) + s.manager.StopHistoryArchiveTasksIntervals() + s.Require().Len(s.manager.historyArchiveTasks, 0) +} + +func (s *ManagerSuite) TestStopTorrentClient_ShouldStopHistoryArchiveTasks() { + torrentConfig := buildTorrentConfig() + s.manager.SetTorrentConfig(&torrentConfig) + + err := s.manager.StartTorrentClient() + s.Require().NoError(err) + defer s.manager.StopTorrentClient() + + community, _, err := s.buildCommunityWithChat() + s.Require().NoError(err) + + interval := 10 * time.Second + go s.manager.StartHistoryArchiveTasksInterval(community, interval) + // Due to async exec we need to wait a bit until we check + // the task count. 
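+	// (2 seconds is well below the 10 second ticker interval, so only the registered
+	// task, not an actual archive run, is expected at this point)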
+ time.Sleep(2 * time.Second) + s.Require().Len(s.manager.historyArchiveTasks, 1) + + errs := s.manager.StopTorrentClient() + s.Require().Len(errs, 0) + s.Require().Len(s.manager.historyArchiveTasks, 0) +} + +func (s *ManagerSuite) TestCreateHistoryArchiveTorrent_WithoutMessages() { + + torrentConfig := buildTorrentConfig() + s.manager.SetTorrentConfig(&torrentConfig) + + community, chatID, err := s.buildCommunityWithChat() + s.Require().NoError(err) + + topic := types.BytesToTopic(transport.ToTopic(chatID)) + topics := []types.TopicType{topic} + + // Time range of 7 days + startDate := time.Date(2020, 1, 1, 00, 00, 00, 0, time.UTC) + endDate := time.Date(2020, 1, 7, 00, 00, 00, 0, time.UTC) + // Partition of 7 days + partition := 7 * 24 * time.Hour + + err = s.manager.CreateHistoryArchiveTorrent(community.ID(), topics, startDate, endDate, partition) + s.Require().NoError(err) + + // There are no waku messages in the database so we don't expect + // any archives to be created + _, err = os.Stat(s.manager.archiveDataFile(community.IDString())) + s.Require().Error(err) + _, err = os.Stat(s.manager.archiveIndexFile(community.IDString())) + s.Require().Error(err) + _, err = os.Stat(s.manager.torrentFile(community.IDString())) + s.Require().Error(err) +} + +func (s *ManagerSuite) TestCreateHistoryArchiveTorrent_ShouldCreateArchive() { + torrentConfig := buildTorrentConfig() + s.manager.SetTorrentConfig(&torrentConfig) + + community, chatID, err := s.buildCommunityWithChat() + s.Require().NoError(err) + + topic := types.BytesToTopic(transport.ToTopic(chatID)) + topics := []types.TopicType{topic} + + // Time range of 7 days + startDate := time.Date(2020, 1, 1, 00, 00, 00, 0, time.UTC) + endDate := time.Date(2020, 1, 7, 00, 00, 00, 0, time.UTC) + // Partition of 7 days, this should create a single archive + partition := 7 * 24 * time.Hour + + message1 := buildMessage(startDate.Add(1*time.Hour), topic, []byte{1}) + message2 := buildMessage(startDate.Add(2*time.Hour), topic, []byte{2}) + // This message is outside of the startDate-endDate range and should not + // be part of the archive + message3 := buildMessage(endDate.Add(2*time.Hour), topic, []byte{3}) + + err = s.manager.StoreWakuMessage(&message1) + s.Require().NoError(err) + err = s.manager.StoreWakuMessage(&message2) + s.Require().NoError(err) + err = s.manager.StoreWakuMessage(&message3) + s.Require().NoError(err) + + err = s.manager.CreateHistoryArchiveTorrent(community.ID(), topics, startDate, endDate, partition) + s.Require().NoError(err) + + _, err = os.Stat(s.manager.archiveDataFile(community.IDString())) + s.Require().NoError(err) + _, err = os.Stat(s.manager.archiveIndexFile(community.IDString())) + s.Require().NoError(err) + _, err = os.Stat(s.manager.torrentFile(community.IDString())) + s.Require().NoError(err) + + index, err := s.manager.loadHistoryArchiveIndexFromFile(community.ID()) + s.Require().NoError(err) + s.Require().Len(index.Archives, 1) + + totalData, err := os.ReadFile(s.manager.archiveDataFile(community.IDString())) + s.Require().NoError(err) + + for _, metadata := range index.Archives { + archive := &protobuf.WakuMessageArchive{} + data := totalData[metadata.Offset : metadata.Offset+metadata.Size-metadata.Padding] + + err = proto.Unmarshal(data, archive) + s.Require().NoError(err) + + s.Require().Len(archive.Messages, 2) + } +} + +func (s *ManagerSuite) TestCreateHistoryArchiveTorrent_ShouldCreateMultipleArchives() { + torrentConfig := buildTorrentConfig() + s.manager.SetTorrentConfig(&torrentConfig) + + community, 
chatID, err := s.buildCommunityWithChat() + s.Require().NoError(err) + + topic := types.BytesToTopic(transport.ToTopic(chatID)) + topics := []types.TopicType{topic} + + // Time range of 3 weeks + startDate := time.Date(2020, 1, 1, 00, 00, 00, 0, time.UTC) + endDate := time.Date(2020, 1, 21, 00, 00, 00, 0, time.UTC) + // 7 days partition, this should create three archives + partition := 7 * 24 * time.Hour + + message1 := buildMessage(startDate.Add(1*time.Hour), topic, []byte{1}) + message2 := buildMessage(startDate.Add(2*time.Hour), topic, []byte{2}) + // We expect 2 archives to be created for startDate - endDate of each + // 7 days of data. This message should end up in the second archive + message3 := buildMessage(startDate.Add(8*24*time.Hour), topic, []byte{3}) + // This one should end up in the third archive + message4 := buildMessage(startDate.Add(14*24*time.Hour), topic, []byte{4}) + + err = s.manager.StoreWakuMessage(&message1) + s.Require().NoError(err) + err = s.manager.StoreWakuMessage(&message2) + s.Require().NoError(err) + err = s.manager.StoreWakuMessage(&message3) + s.Require().NoError(err) + err = s.manager.StoreWakuMessage(&message4) + s.Require().NoError(err) + + err = s.manager.CreateHistoryArchiveTorrent(community.ID(), topics, startDate, endDate, partition) + s.Require().NoError(err) + + index, err := s.manager.loadHistoryArchiveIndexFromFile(community.ID()) + s.Require().NoError(err) + s.Require().Len(index.Archives, 3) + + totalData, err := os.ReadFile(s.manager.archiveDataFile(community.IDString())) + s.Require().NoError(err) + + // First archive has 2 messages + // Second archive has 1 message + // Third archive has 1 message + fromMap := map[uint64]int{ + uint64(startDate.Unix()): 2, + uint64(startDate.Add(partition).Unix()): 1, + uint64(startDate.Add(partition * 2).Unix()): 1, + } + + for _, metadata := range index.Archives { + archive := &protobuf.WakuMessageArchive{} + data := totalData[metadata.Offset : metadata.Offset+metadata.Size-metadata.Padding] + + err = proto.Unmarshal(data, archive) + s.Require().NoError(err) + s.Require().Len(archive.Messages, fromMap[metadata.Metadata.From]) + } +} + +func (s *ManagerSuite) TestCreateHistoryArchiveTorrent_ShouldAppendArchives() { + torrentConfig := buildTorrentConfig() + s.manager.SetTorrentConfig(&torrentConfig) + + community, chatID, err := s.buildCommunityWithChat() + s.Require().NoError(err) + + topic := types.BytesToTopic(transport.ToTopic(chatID)) + topics := []types.TopicType{topic} + + // Time range of 1 week + startDate := time.Date(2020, 1, 1, 00, 00, 00, 0, time.UTC) + endDate := time.Date(2020, 1, 7, 00, 00, 00, 0, time.UTC) + // 7 days partition, this should create one archive + partition := 7 * 24 * time.Hour + + message1 := buildMessage(startDate.Add(1*time.Hour), topic, []byte{1}) + err = s.manager.StoreWakuMessage(&message1) + s.Require().NoError(err) + + err = s.manager.CreateHistoryArchiveTorrent(community.ID(), topics, startDate, endDate, partition) + s.Require().NoError(err) + + index, err := s.manager.loadHistoryArchiveIndexFromFile(community.ID()) + s.Require().NoError(err) + s.Require().Len(index.Archives, 1) + + // Time range of next week + startDate = time.Date(2020, 1, 7, 00, 00, 00, 0, time.UTC) + endDate = time.Date(2020, 1, 14, 00, 00, 00, 0, time.UTC) + + message2 := buildMessage(startDate.Add(2*time.Hour), topic, []byte{2}) + err = s.manager.StoreWakuMessage(&message2) + s.Require().NoError(err) + + err = s.manager.CreateHistoryArchiveTorrent(community.ID(), topics, startDate, endDate, 
partition) + s.Require().NoError(err) + + index, err = s.manager.loadHistoryArchiveIndexFromFile(community.ID()) + s.Require().NoError(err) + s.Require().Len(index.Archives, 2) +} + +func (s *ManagerSuite) TestSeedHistoryArchiveTorrent() { + torrentConfig := buildTorrentConfig() + s.manager.SetTorrentConfig(&torrentConfig) + + err := s.manager.StartTorrentClient() + s.Require().NoError(err) + defer s.manager.StopTorrentClient() + + community, chatID, err := s.buildCommunityWithChat() + s.Require().NoError(err) + + topic := types.BytesToTopic(transport.ToTopic(chatID)) + topics := []types.TopicType{topic} + + startDate := time.Date(2020, 1, 1, 00, 00, 00, 0, time.UTC) + endDate := time.Date(2020, 1, 7, 00, 00, 00, 0, time.UTC) + partition := 7 * 24 * time.Hour + + message1 := buildMessage(startDate.Add(1*time.Hour), topic, []byte{1}) + err = s.manager.StoreWakuMessage(&message1) + s.Require().NoError(err) + + err = s.manager.CreateHistoryArchiveTorrent(community.ID(), topics, startDate, endDate, partition) + s.Require().NoError(err) + + err = s.manager.SeedHistoryArchiveTorrent(community.ID()) + s.Require().NoError(err) + s.Require().Len(s.manager.torrentTasks, 1) + + metaInfoHash := s.manager.torrentTasks[community.IDString()] + torrent, ok := s.manager.torrentClient.Torrent(metaInfoHash) + defer torrent.Drop() + + s.Require().Equal(ok, true) + s.Require().Equal(torrent.Seeding(), true) +} + +func (s *ManagerSuite) TestUnseedHistoryArchiveTorrent() { + torrentConfig := buildTorrentConfig() + s.manager.SetTorrentConfig(&torrentConfig) + + err := s.manager.StartTorrentClient() + s.Require().NoError(err) + defer s.manager.StopTorrentClient() + + community, chatID, err := s.buildCommunityWithChat() + s.Require().NoError(err) + + topic := types.BytesToTopic(transport.ToTopic(chatID)) + topics := []types.TopicType{topic} + + startDate := time.Date(2020, 1, 1, 00, 00, 00, 0, time.UTC) + endDate := time.Date(2020, 1, 7, 00, 00, 00, 0, time.UTC) + partition := 7 * 24 * time.Hour + + message1 := buildMessage(startDate.Add(1*time.Hour), topic, []byte{1}) + err = s.manager.StoreWakuMessage(&message1) + s.Require().NoError(err) + + err = s.manager.CreateHistoryArchiveTorrent(community.ID(), topics, startDate, endDate, partition) + s.Require().NoError(err) + + err = s.manager.SeedHistoryArchiveTorrent(community.ID()) + s.Require().NoError(err) + s.Require().Len(s.manager.torrentTasks, 1) + + metaInfoHash := s.manager.torrentTasks[community.IDString()] + + s.manager.UnseedHistoryArchiveTorrent(community.ID()) + _, ok := s.manager.torrentClient.Torrent(metaInfoHash) + s.Require().Equal(ok, false) +} + +func buildTorrentConfig() params.TorrentConfig { + torrentConfig := params.TorrentConfig{ + Enabled: true, + DataDir: os.TempDir() + "/archivedata", + TorrentDir: os.TempDir() + "/torrents", + Port: 9999, + } + return torrentConfig +} + +func buildMessage(timestamp time.Time, topic types.TopicType, hash []byte) types.Message { + message := types.Message{ + Sig: []byte{1}, + Timestamp: uint32(timestamp.Unix()), + Topic: topic, + Payload: []byte{1}, + Padding: []byte{1}, + Hash: hash, + } + return message +} + +func (s *ManagerSuite) buildCommunityWithChat() (*Community, string, error) { createRequest := &requests.CreateCommunity{ Name: "status", Description: "status community description", Membership: protobuf.CommunityPermissions_NO_MEMBERSHIP, } - community, err := s.manager.CreateCommunity(createRequest) - s.Require().NoError(err) - s.Require().NotNil(community) - + if err != nil { + return nil, "", err + 
} chat := &protobuf.CommunityChat{ Identity: &protobuf.ChatIdentity{ DisplayName: "added-chat", @@ -127,11 +523,15 @@ func (s *ManagerSuite) TestGetAdminCommuniesChatIDs() { }, Members: make(map[string]*protobuf.CommunityMember), } + _, changes, err := s.manager.CreateChat(community.ID(), chat) + if err != nil { + return nil, "", err + } - _, _, err = s.manager.CreateChat(community.ID(), chat) - s.Require().NoError(err) - - adminChatIDs, err := s.manager.GetAdminCommunitiesChatIDs() - s.Require().NoError(err) - s.Require().Len(adminChatIDs, 1) + chatID := "" + for cID := range changes.ChatsAdded { + chatID = cID + break + } + return community, chatID, nil } diff --git a/protocol/communities/persistence.go b/protocol/communities/persistence.go index cf3ee7f4039..821d5e3edf2 100644 --- a/protocol/communities/persistence.go +++ b/protocol/communities/persistence.go @@ -5,6 +5,7 @@ import ( "crypto/ecdsa" "database/sql" "errors" + "fmt" "github.com/golang/protobuf/proto" "go.uber.org/zap" @@ -22,6 +23,7 @@ type Persistence struct { var ErrOldRequestToJoin = errors.New("old request to join") +const OR = " OR " const communitiesBaseQuery = `SELECT c.id, c.private_key, c.description,c.joined,c.verified,c.muted,r.clock FROM communities_communities c LEFT JOIN communities_requests_to_join r ON c.id = r.community_id AND r.public_key = ?` func (p *Persistence) SaveCommunity(community *Community) error { @@ -359,6 +361,114 @@ func (p *Persistence) SaveWakuMessage(message *types.Message) error { return err } +func wakuMessageTimestampQuery(topics []types.TopicType) string { + query := " FROM waku_messages WHERE " + for i, topic := range topics { + query += `topic = "` + topic.String() + `"` + if i < len(topics)-1 { + query += OR + } + } + return query +} + +func (p *Persistence) GetOldestWakuMessageTimestamp(topics []types.TopicType) (uint64, error) { + var timestamp sql.NullInt64 + query := "SELECT MIN(timestamp)" + query += wakuMessageTimestampQuery(topics) + err := p.db.QueryRow(query).Scan(×tamp) + return uint64(timestamp.Int64), err +} + +func (p *Persistence) GetLatestWakuMessageTimestamp(topics []types.TopicType) (uint64, error) { + var timestamp sql.NullInt64 + query := "SELECT MAX(timestamp)" + query += wakuMessageTimestampQuery(topics) + err := p.db.QueryRow(query).Scan(×tamp) + return uint64(timestamp.Int64), err +} + +func (p *Persistence) GetWakuMessagesByFilterTopic(topics []types.TopicType, from uint64, to uint64) ([]types.Message, error) { + + query := "SELECT sig, timestamp, topic, payload, padding, hash FROM waku_messages WHERE timestamp >= " + fmt.Sprint(from) + " AND timestamp < " + fmt.Sprint(to) + " AND (" + + for i, topic := range topics { + query += `topic = "` + topic.String() + `"` + if i < len(topics)-1 { + query += OR + } + } + query += ")" + + rows, err := p.db.Query(query) + if err != nil { + return nil, err + } + defer rows.Close() + messages := []types.Message{} + + for rows.Next() { + msg := types.Message{} + var topicStr string + var hashStr string + err := rows.Scan(&msg.Sig, &msg.Timestamp, &topicStr, &msg.Payload, &msg.Padding, &hashStr) + if err != nil { + return nil, err + } + msg.Topic = types.StringToTopic(topicStr) + msg.Hash = types.Hex2Bytes(hashStr) + messages = append(messages, msg) + } + + return messages, nil +} + +func (p *Persistence) GetMagnetlinkMessageClock(communityID types.HexBytes) (uint64, error) { + var magnetlinkClock uint64 + err := p.db.QueryRow(`SELECT magnetlink_clock FROM communities_archive_info WHERE community_id = ?`, 
communityID.String()).Scan(&magnetlinkClock) + if err == sql.ErrNoRows { + return 0, nil + } + return magnetlinkClock, err +} + +func (p *Persistence) UpdateMagnetlinkMessageClock(communityID types.HexBytes, clock uint64) error { + _, err := p.db.Exec(`UPDATE communities_archive_info SET + magnetlink_clock = ? + WHERE community_id = ?`, + clock, + communityID.String()) + return err +} + +func (p *Persistence) SaveLastMessageArchiveEndDate(communityID types.HexBytes, endDate uint64) error { + _, err := p.db.Exec(`INSERT INTO communities_archive_info (last_message_archive_end_date, community_id) VALUES (?, ?)`, + endDate, + communityID.String()) + return err +} + +func (p *Persistence) UpdateLastMessageArchiveEndDate(communityID types.HexBytes, endDate uint64) error { + _, err := p.db.Exec(`UPDATE communities_archive_info SET + last_message_archive_end_date = ? + WHERE community_id = ?`, + endDate, + communityID.String()) + return err +} + +func (p *Persistence) GetLastMessageArchiveEndDate(communityID types.HexBytes) (uint64, error) { + + var lastMessageArchiveEndDate uint64 + err := p.db.QueryRow(`SELECT last_message_archive_end_date FROM communities_archive_info WHERE community_id = ?`, communityID.String()).Scan(&lastMessageArchiveEndDate) + if err == sql.ErrNoRows { + return 0, nil + } else if err != nil { + return 0, err + } + return lastMessageArchiveEndDate, nil +} + func (p *Persistence) GetCommunitiesSettings() ([]CommunitySettings, error) { rows, err := p.db.Query("SELECT community_id, message_archive_seeding_enabled, message_archive_fetching_enabled FROM communities_settings") if err != nil { @@ -427,3 +537,22 @@ func (p *Persistence) UpdateCommunitySettings(communitySettings CommunitySetting ) return err } + +func (p *Persistence) GetCommunityChatIDs(communityID types.HexBytes) ([]string, error) { + rows, err := p.db.Query(`SELECT id FROM chats WHERE community_id = ?`, communityID.String()) + if err != nil { + return nil, err + } + defer rows.Close() + + ids := []string{} + for rows.Next() { + id := "" + err := rows.Scan(&id) + if err != nil { + return nil, err + } + ids = append(ids, id) + } + return ids, nil +} diff --git a/protocol/messenger.go b/protocol/messenger.go index 4784f4ba386..06686322230 100644 --- a/protocol/messenger.go +++ b/protocol/messenger.go @@ -150,10 +150,11 @@ type peerStatus struct { } type mailserverCycle struct { sync.RWMutex - activeMailserver *mailserversDB.Mailserver - peers map[string]peerStatus - events chan *p2p.PeerEvent - subscription event.Subscription + activeMailserver *mailserversDB.Mailserver + peers map[string]peerStatus + events chan *p2p.PeerEvent + subscription event.Subscription + availabilitySubscriptions []chan struct{} } type dbConfig struct { @@ -379,7 +380,7 @@ func NewMessenger( ensVerifier := ens.New(node, logger, transp, database, c.verifyENSURL, c.verifyENSContractAddress) - communitiesManager, err := communities.NewManager(&identity.PublicKey, database, logger, ensVerifier) + communitiesManager, err := communities.NewManager(&identity.PublicKey, database, logger, ensVerifier, transp, c.torrentConfig) if err != nil { return nil, err } @@ -425,7 +426,8 @@ func NewMessenger( peerStore: peerStore, mailservers: mailservers, mailserverCycle: mailserverCycle{ - peers: make(map[string]peerStatus), + peers: make(map[string]peerStatus), + availabilitySubscriptions: make([]chan struct{}, 0), }, mailserversDatabase: c.mailserversDatabase, account: c.account, @@ -628,6 +630,7 @@ func (m *Messenger) Start() (*MessengerResponse, error) { 
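+	// Each call to the communities manager's Subscribe() appears to register its own
+	// subscription channel (publish fans out to all of them), so the history-archive
+	// signal handler below gets a channel separate from the regular community
+	// subscription loop.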
m.handleEncryptionLayerSubscriptions(subscriptions) m.handleCommunitiesSubscription(m.communitiesManager.Subscribe()) + m.handleCommunitiesHistoryArchivesSubscription(m.communitiesManager.Subscribe()) m.handleConnectionChange(m.online()) m.handleENSVerificationSubscription(ensSubscription) m.watchConnectionChange() @@ -657,6 +660,17 @@ func (m *Messenger) Start() (*MessengerResponse, error) { return nil, err } + if m.config.torrentConfig.Enabled { + adminCommunities, err := m.communitiesManager.Created() + if err == nil && len(adminCommunities) > 0 { + available := m.SubscribeMailserverAvailable() + go func() { + <-available + m.InitHistoryArchiveTasks(adminCommunities) + }() + } + } + err = m.httpServer.Start() if err != nil { return nil, err diff --git a/protocol/messenger_communities.go b/protocol/messenger_communities.go index b8bbd0a5ee9..56bdcd9a06d 100644 --- a/protocol/messenger_communities.go +++ b/protocol/messenger_communities.go @@ -4,6 +4,7 @@ import ( "context" "crypto/ecdsa" "fmt" + "log" "time" "github.com/golang/protobuf/proto" @@ -18,6 +19,9 @@ import ( "github.com/status-im/status-go/protocol/transport" ) +// 7 days interval +var messageArchiveInterval = 7 * 24 * time.Hour + func (m *Messenger) publishOrg(org *communities.Community) error { m.logger.Debug("publishing org", zap.String("org-id", org.IDString()), zap.Any("org", org)) payload, err := org.MarshaledDescription() @@ -59,6 +63,56 @@ func (m *Messenger) publishOrgInvitation(org *communities.Community, invitation return err } +func (m *Messenger) handleCommunitiesHistoryArchivesSubscription(c chan *communities.Subscription) { + + go func() { + for { + select { + case sub, more := <-c: + if !more { + return + } + + if sub.CreatingHistoryArchivesSignal != nil { + m.config.messengerSignalsHandler.CreatingHistoryArchives(sub.CreatingHistoryArchivesSignal.CommunityID) + } + + if sub.HistoryArchivesCreatedSignal != nil { + m.config.messengerSignalsHandler.HistoryArchivesCreated( + sub.HistoryArchivesCreatedSignal.CommunityID, + sub.HistoryArchivesCreatedSignal.From, + sub.HistoryArchivesCreatedSignal.To, + ) + } + + if sub.NoHistoryArchivesCreatedSignal != nil { + m.config.messengerSignalsHandler.NoHistoryArchivesCreated( + sub.NoHistoryArchivesCreatedSignal.CommunityID, + sub.NoHistoryArchivesCreatedSignal.From, + sub.NoHistoryArchivesCreatedSignal.To, + ) + } + + if sub.HistoryArchivesSeedingSignal != nil { + + m.config.messengerSignalsHandler.HistoryArchivesSeeding(sub.HistoryArchivesSeedingSignal.CommunityID) + + err := m.dispatchMagnetlinkMessage(sub.HistoryArchivesSeedingSignal.CommunityID) + if err != nil { + m.logger.Debug("failed to dispatch magnetlink message", zap.Error(err)) + } + } + + if sub.HistoryArchivesUnseededSignal != nil { + m.config.messengerSignalsHandler.HistoryArchivesUnseeded(sub.HistoryArchivesUnseededSignal.CommunityID) + } + case <-m.quit: + return + } + } + }() +} + // handleCommunitiesSubscription handles events from communities func (m *Messenger) handleCommunitiesSubscription(c chan *communities.Subscription) { @@ -417,6 +471,8 @@ func (m *Messenger) LeaveCommunity(communityID types.HexBytes) (*MessengerRespon return nil, err } + m.communitiesManager.StopHistoryArchiveTasksInterval(communityID) + if com, ok := mr.communities[communityID.String()]; ok { err = m.syncCommunity(context.Background(), com) if err != nil { @@ -592,6 +648,10 @@ func (m *Messenger) CreateCommunity(request *requests.CreateCommunity) (*Messeng return nil, err } + if m.config.torrentConfig.Enabled && 
communitySettings.HistoryArchiveSupportEnabled { + go m.communitiesManager.StartHistoryArchiveTasksInterval(community, messageArchiveInterval) + } + return response, nil } @@ -614,6 +674,18 @@ func (m *Messenger) EditCommunity(request *requests.EditCommunity) (*MessengerRe return nil, err } + id := community.ID() + + if m.config.torrentConfig.Enabled { + if !communitySettings.HistoryArchiveSupportEnabled { + m.communitiesManager.StopHistoryArchiveTasksInterval(id) + } else if !m.communitiesManager.IsSeedingHistoryArchiveTorrent(id) { + var communities []*communities.Community + communities = append(communities, community) + go m.InitHistoryArchiveTasks(communities) + } + } + response := &MessengerResponse{} response.AddCommunity(community) response.AddCommunitySettings(&communitySettings) @@ -652,6 +724,11 @@ func (m *Messenger) ImportCommunity(ctx context.Context, key *ecdsa.PrivateKey) return nil, err } + if m.config.torrentConfig.Enabled { + var communities []*communities.Community + communities = append(communities, community) + go m.InitHistoryArchiveTasks(communities) + } return response, nil } @@ -1095,6 +1172,223 @@ func (m *Messenger) handleSyncCommunity(messageState *ReceivedMessageState, sync return nil } +func (m *Messenger) InitHistoryArchiveTasks(communities []*communities.Community) { + + for _, c := range communities { + + if c.Joined() { + settings, err := m.communitiesManager.GetCommunitySettingsByID(c.ID()) + if err != nil { + log.Println("failed to get community settings", err) + m.logger.Debug("failed to get community settings", zap.Error(err)) + continue + } + if !settings.HistoryArchiveSupportEnabled { + continue + } + + filters, err := m.communitiesManager.GetCommunityChatsFilters(c.ID()) + if err != nil { + log.Println("failed to get community chats filters", err) + m.logger.Debug("failed to get community chats filters", zap.Error(err)) + continue + } + + if len(filters) == 0 { + log.Println("no filters or chats for this community starting interval", c.IDString()) + m.logger.Debug("no filters or chats for this community starting interval") + go m.communitiesManager.StartHistoryArchiveTasksInterval(c, messageArchiveInterval) + continue + } + + topics := []types.TopicType{} + + for _, filter := range filters { + topics = append(topics, filter.Topic) + } + + // First we need to know the timestamp of the latest waku message + // we've received for this community, so we can request messages we've + // possibly missed since then + latestWakuMessageTimestamp, err := m.communitiesManager.GetLatestWakuMessageTimestamp(topics) + if err != nil { + log.Println("failed to get Latest waku message timestamp", err) + m.logger.Debug("failed to get Latest waku message timestamp", zap.Error(err)) + continue + } + + if latestWakuMessageTimestamp == 0 { + // This means we don't have any waku messages for this community + // yet, either because no messages were sent in the community so far, + // or because messages haven't reached this node + // + // In this case we default to requesting messages from the store nodes + // for the past 30 days + latestWakuMessageTimestamp = uint64(time.Now().AddDate(0, 0, -30).Unix()) + } + + // Request possibly missed waku messages for community + _, err = m.syncFiltersFrom(filters, uint32(latestWakuMessageTimestamp)) + if err != nil { + log.Println("failed to request missing messages", err) + m.logger.Debug("failed to request missing messages", zap.Error(err)) + continue + } + + // We figure out the end date of the last created archive and schedule + 
// the interval for creating future archives
+			// If the last end date is at least `interval` ago, we create an archive immediately first
+			lastArchiveEndDateTimestamp, err := m.communitiesManager.GetHistoryArchivePartitionStartTimestamp(c.ID())
+			if err != nil {
+				log.Println("failed to get archive partition start timestamp", err)
+				m.logger.Debug("failed to get archive partition start timestamp", zap.Error(err))
+				continue
+			}
+
+			to := time.Now()
+			lastArchiveEndDate := time.Unix(int64(lastArchiveEndDateTimestamp), 0)
+			log.Println("Last archive end date: ", lastArchiveEndDate)
+			durationSinceLastArchive := to.Sub(lastArchiveEndDate)
+
+			if lastArchiveEndDateTimestamp == 0 {
+				// No prior messages to be archived, so we just kick off the archive creation loop
+				// for future archives
+				go m.communitiesManager.StartHistoryArchiveTasksInterval(c, messageArchiveInterval)
+			} else if durationSinceLastArchive < messageArchiveInterval {
+				// Last archive is less than `interval` old, wait until `interval` is complete,
+				// then create archive and kick off archive creation loop for future archives
+				// Seed current archive in the meantime
+				err := m.communitiesManager.SeedHistoryArchiveTorrent(c.ID())
+				if err != nil {
+					m.logger.Debug("failed to seed history archive", zap.Error(err))
+				}
+				timeToNextInterval := messageArchiveInterval - durationSinceLastArchive
+
+				log.Println("Starting history archive tasks interval in: ", timeToNextInterval)
+				time.AfterFunc(timeToNextInterval, func() {
+					err := m.communitiesManager.CreateAndSeedHistoryArchive(c.ID(), topics, lastArchiveEndDate, to.Add(timeToNextInterval), messageArchiveInterval)
+					if err != nil {
+						m.logger.Debug("failed to create and seed history archive", zap.Error(err))
+					}
+					go m.communitiesManager.StartHistoryArchiveTasksInterval(c, messageArchiveInterval)
+				})
+			} else {
+				// Looks like the last archive was generated more than `interval`
+				// ago, so let's create a new archive now and then schedule the archive
+				// creation loop
+				err := m.communitiesManager.CreateAndSeedHistoryArchive(c.ID(), topics, lastArchiveEndDate, to, messageArchiveInterval)
+				if err != nil {
+					m.logger.Debug("failed to create and seed history archive", zap.Error(err))
+				}
+
+				go m.communitiesManager.StartHistoryArchiveTasksInterval(c, messageArchiveInterval)
+			}
+		}
+	}
+}
+
+func (m *Messenger) dispatchMagnetlinkMessage(communityID string) error {
+
+	community, err := m.communitiesManager.GetByIDString(communityID)
+	if err != nil {
+		return err
+	}
+
+	magnetlink, err := m.communitiesManager.GetHistoryArchiveMagnetlink(community.ID())
+	if err != nil {
+		return err
+	}
+
+	magnetLinkMessage := &protobuf.CommunityMessageArchiveMagnetlink{
+		Clock:     m.getTimesource().GetCurrentTime(),
+		MagnetUri: magnetlink,
+	}
+
+	encodedMessage, err := proto.Marshal(magnetLinkMessage)
+	if err != nil {
+		return err
+	}
+
+	chatID := community.MagnetlinkMessageChannelID()
+	rawMessage := common.RawMessage{
+		LocalChatID:          chatID,
+		Sender:               community.PrivateKey(),
+		Payload:              encodedMessage,
+		MessageType:          protobuf.ApplicationMetadataMessage_COMMUNITY_ARCHIVE_MAGNETLINK,
+		SkipGroupMessageWrap: true,
+	}
+
+	log.Println("Dispatching magnet link for community ", community.IDString())
+	_, err = m.sender.SendPublic(context.Background(), chatID, rawMessage)
+	if err != nil {
+		return err
+	}
+
+	err = m.communitiesManager.UpdateCommunityDescriptionMagnetlinkMessageClock(community.ID(), magnetLinkMessage.Clock)
+	if err != nil {
+		return err
+	}
+	return 
m.communitiesManager.UpdateMagnetlinkMessageClock(community.ID(), magnetLinkMessage.Clock) +} + +func (m *Messenger) EnableCommunityHistoryArchiveProtocol() error { + nodeConfig, err := m.settings.GetNodeConfig() + if err != nil { + return err + } + + if nodeConfig.TorrentConfig.Enabled { + return nil + } + + nodeConfig.TorrentConfig.Enabled = true + err = m.settings.SaveSetting("node-config", nodeConfig) + if err != nil { + return err + } + + m.config.torrentConfig = &nodeConfig.TorrentConfig + m.communitiesManager.SetTorrentConfig(&nodeConfig.TorrentConfig) + err = m.communitiesManager.StartTorrentClient() + if err != nil { + return err + } + + communities, err := m.communitiesManager.Created() + if err != nil { + return err + } + + if len(communities) > 0 { + go m.InitHistoryArchiveTasks(communities) + } + m.config.messengerSignalsHandler.HistoryArchivesProtocolEnabled() + return nil +} + +func (m *Messenger) DisableCommunityHistoryArchiveProtocol() error { + + nodeConfig, err := m.settings.GetNodeConfig() + if err != nil { + return err + } + if !nodeConfig.TorrentConfig.Enabled { + return nil + } + + m.communitiesManager.StopTorrentClient() + + nodeConfig.TorrentConfig.Enabled = false + err = m.settings.SaveSetting("node-config", nodeConfig) + m.config.torrentConfig = &nodeConfig.TorrentConfig + m.communitiesManager.SetTorrentConfig(&nodeConfig.TorrentConfig) + if err != nil { + return err + } + m.config.messengerSignalsHandler.HistoryArchivesProtocolDisabled() + return nil +} + func (m *Messenger) GetCommunitiesSettings() ([]communities.CommunitySettings, error) { settings, err := m.communitiesManager.GetCommunitiesSettings() if err != nil { diff --git a/protocol/messenger_config.go b/protocol/messenger_config.go index a34d3137833..74a479bfac1 100644 --- a/protocol/messenger_config.go +++ b/protocol/messenger_config.go @@ -34,6 +34,13 @@ type MessengerSignalsHandler interface { HistoryRequestCompleted(requestID string) HistoryRequestFailed(requestID string, err error) BackupPerformed(uint64) + HistoryArchivesProtocolEnabled() + HistoryArchivesProtocolDisabled() + CreatingHistoryArchives(communityID string) + NoHistoryArchivesCreated(communityID string, from int, to int) + HistoryArchivesCreated(communityID string, from int, to int) + HistoryArchivesSeeding(communityID string) + HistoryArchivesUnseeded(communityID string) } type config struct { diff --git a/protocol/messenger_mailserver.go b/protocol/messenger_mailserver.go index 34e3d0262f9..29e77d8a720 100644 --- a/protocol/messenger_mailserver.go +++ b/protocol/messenger_mailserver.go @@ -332,7 +332,7 @@ func getPrioritizedBatches() []int { return []int{1, 5, 10} } -func (m *Messenger) syncFilters(filters []*transport.Filter) (*MessengerResponse, error) { +func (m *Messenger) syncFiltersFrom(filters []*transport.Filter, lastRequest uint32) (*MessengerResponse, error) { response := &MessengerResponse{} topicInfo, err := m.mailserversDatabase.Topics() if err != nil { @@ -380,11 +380,18 @@ func (m *Messenger) syncFilters(filters []*transport.Filter) (*MessengerResponse } topicData, ok := topicsData[filter.Topic.String()] + var capToDefaultSyncPeriod = true if !ok { + if lastRequest == 0 { + lastRequest = defaultPeriodFromNow + } topicData = mailservers.MailserverTopic{ Topic: filter.Topic.String(), LastRequest: int(defaultPeriodFromNow), } + } else if lastRequest != 0 { + topicData.LastRequest = int(lastRequest) + capToDefaultSyncPeriod = false } batchID := topicData.LastRequest @@ -411,11 +418,13 @@ func (m *Messenger) 
syncFilters(filters []*transport.Filter) (*MessengerResponse batch, ok := batches[batchID] if !ok { - from, err := m.capToDefaultSyncPeriod(uint32(topicData.LastRequest)) - if err != nil { - return nil, err + from := uint32(topicData.LastRequest) + if capToDefaultSyncPeriod { + from, err = m.capToDefaultSyncPeriod(uint32(topicData.LastRequest)) + if err != nil { + return nil, err + } } - batch = MailserverBatch{From: from, To: to} } @@ -508,6 +517,10 @@ func (m *Messenger) syncFilters(filters []*transport.Filter) (*MessengerResponse return response, nil } +func (m *Messenger) syncFilters(filters []*transport.Filter) (*MessengerResponse, error) { + return m.syncFiltersFrom(filters, 0) +} + func (m *Messenger) calculateGapForChat(chat *Chat, from uint32) (*common.Message, error) { // Chat was never synced, no gap necessary if chat.SyncedTo == 0 { diff --git a/protocol/messenger_mailserver_cycle.go b/protocol/messenger_mailserver_cycle.go index 724c84dbe83..d885e17c0a4 100644 --- a/protocol/messenger_mailserver_cycle.go +++ b/protocol/messenger_mailserver_cycle.go @@ -486,6 +486,7 @@ func (m *Messenger) handleMailserverCycleEvent(connectedPeers []ConnectedPeer) e if m.mailserverCycle.activeMailserver != nil && id == m.mailserverCycle.activeMailserver.ID { m.logger.Info("mailserver available", zap.String("address", connectedPeer.UniqueID)) + m.EmitMailserverAvailable() signal.SendMailserverAvailable(m.mailserverCycle.activeMailserver.Address, m.mailserverCycle.activeMailserver.ID) } // Query mailserver @@ -650,3 +651,18 @@ func (m *Messenger) getPinnedMailserver() (*mailservers.Mailserver, error) { return nil, nil } + +func (m *Messenger) EmitMailserverAvailable() { + for _, s := range m.mailserverCycle.availabilitySubscriptions { + s <- struct{}{} + close(s) + l := len(m.mailserverCycle.availabilitySubscriptions) + m.mailserverCycle.availabilitySubscriptions = m.mailserverCycle.availabilitySubscriptions[:l-1] + } +} + +func (m *Messenger) SubscribeMailserverAvailable() chan struct{} { + c := make(chan struct{}) + m.mailserverCycle.availabilitySubscriptions = append(m.mailserverCycle.availabilitySubscriptions, c) + return c +} diff --git a/protocol/protobuf/application_metadata_message.pb.go b/protocol/protobuf/application_metadata_message.pb.go index ea6e7187889..90603237dc4 100644 --- a/protocol/protobuf/application_metadata_message.pb.go +++ b/protocol/protobuf/application_metadata_message.pb.go @@ -66,6 +66,7 @@ const ( ApplicationMetadataMessage_SYNC_BOOKMARK ApplicationMetadataMessage_Type = 40 ApplicationMetadataMessage_SYNC_CLEAR_HISTORY ApplicationMetadataMessage_Type = 41 ApplicationMetadataMessage_SYNC_SETTING ApplicationMetadataMessage_Type = 42 + ApplicationMetadataMessage_COMMUNITY_ARCHIVE_MAGNETLINK ApplicationMetadataMessage_Type = 43 ) var ApplicationMetadataMessage_Type_name = map[int32]string{ @@ -112,6 +113,7 @@ var ApplicationMetadataMessage_Type_name = map[int32]string{ 40: "SYNC_BOOKMARK", 41: "SYNC_CLEAR_HISTORY", 42: "SYNC_SETTING", + 43: "COMMUNITY_ARCHIVE_MAGNETLINK", } var ApplicationMetadataMessage_Type_value = map[string]int32{ @@ -158,6 +160,7 @@ var ApplicationMetadataMessage_Type_value = map[string]int32{ "SYNC_BOOKMARK": 40, "SYNC_CLEAR_HISTORY": 41, "SYNC_SETTING": 42, + "COMMUNITY_ARCHIVE_MAGNETLINK": 43, } func (x ApplicationMetadataMessage_Type) String() string { @@ -236,50 +239,51 @@ func init() { } var fileDescriptor_ad09a6406fcf24c7 = []byte{ - // 715 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 
0x84, 0x54, 0x5d, 0x53, 0x5b, 0x37, - 0x10, 0xad, 0x13, 0x0a, 0x61, 0x0d, 0x44, 0x28, 0x7c, 0x18, 0x13, 0xc0, 0x71, 0xd2, 0x84, 0xa4, - 0x33, 0xee, 0x4c, 0xfb, 0xd8, 0xe9, 0x83, 0x2c, 0x2d, 0xb6, 0xb0, 0xaf, 0x74, 0x91, 0x74, 0xdd, - 0x71, 0x5f, 0x34, 0x97, 0xe2, 0x32, 0xcc, 0x00, 0xf6, 0x80, 0x79, 0xe0, 0x1f, 0xf5, 0x57, 0xf4, - 0xb7, 0x75, 0x74, 0xaf, 0xbf, 0x00, 0x13, 0x9e, 0x6c, 0x9d, 0x73, 0xb4, 0xab, 0x3d, 0xbb, 0x7b, - 0xa1, 0x9a, 0x0e, 0x06, 0x97, 0x17, 0x7f, 0xa7, 0xc3, 0x8b, 0xfe, 0xb5, 0xbf, 0xea, 0x0d, 0xd3, - 0xb3, 0x74, 0x98, 0xfa, 0xab, 0xde, 0xed, 0x6d, 0x7a, 0xde, 0xab, 0x0d, 0x6e, 0xfa, 0xc3, 0x3e, - 0x7d, 0x93, 0xfd, 0x9c, 0xde, 0xfd, 0x53, 0xfd, 0x0f, 0xa0, 0xcc, 0xa6, 0x17, 0xa2, 0x91, 0x3e, - 0xca, 0xe5, 0xf4, 0x3d, 0x2c, 0xdf, 0x5e, 0x9c, 0x5f, 0xa7, 0xc3, 0xbb, 0x9b, 0x5e, 0xa9, 0x50, - 0x29, 0x1c, 0xae, 0x98, 0x29, 0x40, 0x4b, 0xb0, 0x34, 0x48, 0xef, 0x2f, 0xfb, 0xe9, 0x59, 0xe9, - 0x55, 0xc6, 0x8d, 0x8f, 0xf4, 0x0f, 0x58, 0x18, 0xde, 0x0f, 0x7a, 0xa5, 0xd7, 0x95, 0xc2, 0xe1, - 0xda, 0xaf, 0x5f, 0x6b, 0xe3, 0x7c, 0xb5, 0xe7, 0x73, 0xd5, 0xdc, 0xfd, 0xa0, 0x67, 0xb2, 0x6b, - 0xd5, 0x7f, 0x97, 0x61, 0x21, 0x1c, 0x69, 0x11, 0x96, 0x12, 0xd5, 0x52, 0xfa, 0x4f, 0x45, 0x7e, - 0xa0, 0x04, 0x56, 0x78, 0x93, 0x39, 0x1f, 0xa1, 0xb5, 0xac, 0x81, 0xa4, 0x40, 0x29, 0xac, 0x71, - 0xad, 0x1c, 0xe3, 0xce, 0x27, 0xb1, 0x60, 0x0e, 0xc9, 0x2b, 0xba, 0x07, 0x3b, 0x11, 0x46, 0x75, - 0x34, 0xb6, 0x29, 0xe3, 0x11, 0x3c, 0xb9, 0xf2, 0x9a, 0x6e, 0xc2, 0x7a, 0xcc, 0xa4, 0xf1, 0x52, - 0x59, 0xc7, 0xda, 0x6d, 0xe6, 0xa4, 0x56, 0x64, 0x21, 0xc0, 0xb6, 0xab, 0xf8, 0x43, 0xf8, 0x47, - 0xfa, 0x11, 0x0e, 0x0c, 0x9e, 0x24, 0x68, 0x9d, 0x67, 0x42, 0x18, 0xb4, 0xd6, 0x1f, 0x69, 0xe3, - 0x9d, 0x61, 0xca, 0x32, 0x9e, 0x89, 0x16, 0xe9, 0x37, 0xf8, 0xcc, 0x38, 0xc7, 0xd8, 0xf9, 0x97, - 0xb4, 0x4b, 0xf4, 0x67, 0xf8, 0x22, 0x90, 0xb7, 0xa5, 0xc2, 0x17, 0xc5, 0x6f, 0xe8, 0x36, 0xbc, - 0x1b, 0x8b, 0x66, 0x89, 0x65, 0xba, 0x01, 0xc4, 0xa2, 0x12, 0x0f, 0x50, 0xa0, 0x07, 0xb0, 0xfb, - 0x38, 0xf6, 0xac, 0xa0, 0x18, 0xac, 0x79, 0x52, 0xa4, 0x1f, 0x19, 0x48, 0x56, 0xe6, 0xd3, 0x8c, - 0x73, 0x9d, 0x28, 0x47, 0x56, 0xe9, 0x07, 0xd8, 0x7b, 0x4a, 0xc7, 0x49, 0xbd, 0x2d, 0xb9, 0x0f, - 0x7d, 0x21, 0x6b, 0x74, 0x1f, 0xca, 0xe3, 0x7e, 0x70, 0x2d, 0xd0, 0x33, 0xd1, 0x41, 0xe3, 0xa4, - 0xc5, 0x08, 0x95, 0x23, 0x6f, 0x69, 0x15, 0xf6, 0xe3, 0xc4, 0x36, 0xbd, 0xd2, 0x4e, 0x1e, 0x49, - 0x9e, 0x87, 0x30, 0xd8, 0x90, 0xd6, 0x99, 0xdc, 0x72, 0x12, 0x1c, 0xfa, 0xbe, 0xc6, 0x1b, 0xb4, - 0xb1, 0x56, 0x16, 0xc9, 0x3a, 0xdd, 0x85, 0xed, 0xa7, 0xe2, 0x93, 0x04, 0x4d, 0x97, 0x50, 0xfa, - 0x09, 0x2a, 0xcf, 0x90, 0xd3, 0x10, 0xef, 0x42, 0xd5, 0xf3, 0xf2, 0x65, 0xfe, 0x91, 0x8d, 0x50, - 0xd2, 0x3c, 0x7a, 0x74, 0x7d, 0x33, 0x8c, 0x20, 0x46, 0xfa, 0x58, 0x7a, 0x83, 0x23, 0x9f, 0xb7, - 0xe8, 0x0e, 0x6c, 0x36, 0x8c, 0x4e, 0xe2, 0xcc, 0x16, 0x2f, 0x55, 0x47, 0xba, 0xbc, 0xba, 0x6d, - 0xba, 0x0e, 0xab, 0x39, 0x28, 0x50, 0x39, 0xe9, 0xba, 0xa4, 0x14, 0xd4, 0x5c, 0x47, 0x51, 0xa2, - 0xa4, 0xeb, 0x7a, 0x81, 0x96, 0x1b, 0x19, 0x67, 0xea, 0x1d, 0x5a, 0x82, 0x8d, 0x29, 0x35, 0x13, - 0xa7, 0x1c, 0x5e, 0x3d, 0x65, 0x26, 0xdd, 0xd6, 0xfe, 0x58, 0x4b, 0x45, 0x76, 0xe9, 0x5b, 0x28, - 0xc6, 0x52, 0x4d, 0xc6, 0xfe, 0x7d, 0xd8, 0x1d, 0x14, 0x72, 0xba, 0x3b, 0x7b, 0xe1, 0x25, 0xd6, - 0x31, 0x97, 0xd8, 0xf1, 0xea, 0xec, 0x87, 0x5a, 0x04, 0xb6, 0x71, 0x66, 0x5f, 0x0e, 0xc2, 0x50, - 0xcd, 0x9b, 0x99, 0x51, 0x6a, 0x52, 0xa1, 0x65, 0xd8, 0x62, 0x4a, 0xab, 0x6e, 0xa4, 0x13, 0xeb, - 0x23, 0x74, 0x46, 0x72, 0x5f, 0x67, 0x8e, 0x37, 0xc9, 0x87, 0xc9, 0x56, 0x65, 0x25, 
0x1b, 0x8c, - 0x74, 0x07, 0x05, 0xa9, 0x86, 0xae, 0x4d, 0xe1, 0x51, 0x2a, 0x1b, 0x0c, 0x14, 0xe4, 0x23, 0x05, - 0x58, 0xac, 0x33, 0xde, 0x4a, 0x62, 0xf2, 0x69, 0x32, 0x91, 0xc1, 0xd9, 0x4e, 0xa8, 0x94, 0xa3, - 0x72, 0x68, 0x72, 0xe9, 0x4f, 0x93, 0x89, 0x7c, 0x4c, 0xe7, 0xdb, 0x88, 0x82, 0x7c, 0x0e, 0x13, - 0x37, 0x57, 0x22, 0xa4, 0x8d, 0xa4, 0xb5, 0x28, 0xc8, 0x97, 0xcc, 0x89, 0xa0, 0xa9, 0x6b, 0xdd, - 0x8a, 0x98, 0x69, 0x91, 0x43, 0xba, 0x05, 0x34, 0x7f, 0x61, 0x1b, 0x99, 0xf1, 0x4d, 0x69, 0x9d, - 0x36, 0x5d, 0xf2, 0x35, 0xd8, 0x98, 0xe1, 0x16, 0x9d, 0x93, 0xaa, 0x41, 0xbe, 0xd5, 0x57, 0xff, - 0x2a, 0xd6, 0x7e, 0xf9, 0x7d, 0xfc, 0x7d, 0x3b, 0x5d, 0xcc, 0xfe, 0xfd, 0xf6, 0x7f, 0x00, 0x00, - 0x00, 0xff, 0xff, 0xab, 0x54, 0x29, 0x45, 0x86, 0x05, 0x00, 0x00, + // 735 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x54, 0x5d, 0x53, 0x1b, 0x37, + 0x14, 0xad, 0x13, 0x0a, 0xe1, 0x1a, 0x88, 0x50, 0xf8, 0x30, 0xe6, 0xcb, 0x71, 0xd2, 0x84, 0x24, + 0x33, 0xee, 0x4c, 0xfb, 0xd8, 0xe9, 0x83, 0x2c, 0xdd, 0xd8, 0x8a, 0xbd, 0xd2, 0x46, 0xd2, 0xba, + 0xe3, 0xbe, 0x68, 0x36, 0x8d, 0xcb, 0x30, 0x03, 0xd8, 0x03, 0xe6, 0x81, 0xbf, 0xd8, 0x5f, 0xd1, + 0x9f, 0xd2, 0xd1, 0xae, 0xed, 0x35, 0x60, 0xca, 0x93, 0xad, 0x73, 0x8e, 0xee, 0xd5, 0x3d, 0xf7, + 0xde, 0x85, 0x7a, 0x3a, 0x1a, 0x9d, 0x9f, 0xfd, 0x95, 0x8e, 0xcf, 0x86, 0x97, 0xfe, 0x62, 0x30, + 0x4e, 0xbf, 0xa7, 0xe3, 0xd4, 0x5f, 0x0c, 0xae, 0xaf, 0xd3, 0xd3, 0x41, 0x63, 0x74, 0x35, 0x1c, + 0x0f, 0xe9, 0x8b, 0xec, 0xe7, 0xdb, 0xcd, 0xdf, 0xf5, 0x7f, 0x01, 0xaa, 0xac, 0xb8, 0x10, 0x4d, + 0xf4, 0x51, 0x2e, 0xa7, 0x07, 0xb0, 0x7a, 0x7d, 0x76, 0x7a, 0x99, 0x8e, 0x6f, 0xae, 0x06, 0x95, + 0x52, 0xad, 0x74, 0xb2, 0x66, 0x0a, 0x80, 0x56, 0x60, 0x65, 0x94, 0xde, 0x9e, 0x0f, 0xd3, 0xef, + 0x95, 0x67, 0x19, 0x37, 0x3d, 0xd2, 0xdf, 0x61, 0x69, 0x7c, 0x3b, 0x1a, 0x54, 0x9e, 0xd7, 0x4a, + 0x27, 0x1b, 0xbf, 0x7c, 0x68, 0x4c, 0xf3, 0x35, 0x1e, 0xcf, 0xd5, 0x70, 0xb7, 0xa3, 0x81, 0xc9, + 0xae, 0xd5, 0xff, 0x59, 0x85, 0xa5, 0x70, 0xa4, 0x65, 0x58, 0x49, 0x54, 0x47, 0xe9, 0x3f, 0x14, + 0xf9, 0x81, 0x12, 0x58, 0xe3, 0x6d, 0xe6, 0x7c, 0x84, 0xd6, 0xb2, 0x16, 0x92, 0x12, 0xa5, 0xb0, + 0xc1, 0xb5, 0x72, 0x8c, 0x3b, 0x9f, 0xc4, 0x82, 0x39, 0x24, 0xcf, 0xe8, 0x21, 0xec, 0x45, 0x18, + 0x35, 0xd1, 0xd8, 0xb6, 0x8c, 0x27, 0xf0, 0xec, 0xca, 0x73, 0xba, 0x0d, 0x9b, 0x31, 0x93, 0xc6, + 0x4b, 0x65, 0x1d, 0xeb, 0x76, 0x99, 0x93, 0x5a, 0x91, 0xa5, 0x00, 0xdb, 0xbe, 0xe2, 0x77, 0xe1, + 0x1f, 0xe9, 0x1b, 0x38, 0x36, 0xf8, 0x35, 0x41, 0xeb, 0x3c, 0x13, 0xc2, 0xa0, 0xb5, 0xfe, 0xb3, + 0x36, 0xde, 0x19, 0xa6, 0x2c, 0xe3, 0x99, 0x68, 0x99, 0x7e, 0x84, 0x77, 0x8c, 0x73, 0x8c, 0x9d, + 0x7f, 0x4a, 0xbb, 0x42, 0x3f, 0xc1, 0x7b, 0x81, 0xbc, 0x2b, 0x15, 0x3e, 0x29, 0x7e, 0x41, 0x77, + 0xe1, 0xd5, 0x54, 0x34, 0x4f, 0xac, 0xd2, 0x2d, 0x20, 0x16, 0x95, 0xb8, 0x83, 0x02, 0x3d, 0x86, + 0xfd, 0xfb, 0xb1, 0xe7, 0x05, 0xe5, 0x60, 0xcd, 0x83, 0x22, 0xfd, 0xc4, 0x40, 0xb2, 0xb6, 0x98, + 0x66, 0x9c, 0xeb, 0x44, 0x39, 0xb2, 0x4e, 0x5f, 0xc3, 0xe1, 0x43, 0x3a, 0x4e, 0x9a, 0x5d, 0xc9, + 0x7d, 0xe8, 0x0b, 0xd9, 0xa0, 0x47, 0x50, 0x9d, 0xf6, 0x83, 0x6b, 0x81, 0x9e, 0x89, 0x1e, 0x1a, + 0x27, 0x2d, 0x46, 0xa8, 0x1c, 0x79, 0x49, 0xeb, 0x70, 0x14, 0x27, 0xb6, 0xed, 0x95, 0x76, 0xf2, + 0xb3, 0xe4, 0x79, 0x08, 0x83, 0x2d, 0x69, 0x9d, 0xc9, 0x2d, 0x27, 0xc1, 0xa1, 0xff, 0xd7, 0x78, + 0x83, 0x36, 0xd6, 0xca, 0x22, 0xd9, 0xa4, 0xfb, 0xb0, 0xfb, 0x50, 0xfc, 0x35, 0x41, 0xd3, 0x27, + 0x94, 0xbe, 0x85, 0xda, 0x23, 0x64, 0x11, 0xe2, 0x55, 0xa8, 0x7a, 0x51, 0xbe, 0xcc, 0x3f, 
0xb2, + 0x15, 0x4a, 0x5a, 0x44, 0x4f, 0xae, 0x6f, 0x87, 0x11, 0xc4, 0x48, 0x7f, 0x91, 0xde, 0xe0, 0xc4, + 0xe7, 0x1d, 0xba, 0x07, 0xdb, 0x2d, 0xa3, 0x93, 0x38, 0xb3, 0xc5, 0x4b, 0xd5, 0x93, 0x2e, 0xaf, + 0x6e, 0x97, 0x6e, 0xc2, 0x7a, 0x0e, 0x0a, 0x54, 0x4e, 0xba, 0x3e, 0xa9, 0x04, 0x35, 0xd7, 0x51, + 0x94, 0x28, 0xe9, 0xfa, 0x5e, 0xa0, 0xe5, 0x46, 0xc6, 0x99, 0x7a, 0x8f, 0x56, 0x60, 0xab, 0xa0, + 0xe6, 0xe2, 0x54, 0xc3, 0xab, 0x0b, 0x66, 0xd6, 0x6d, 0xed, 0xbf, 0x68, 0xa9, 0xc8, 0x3e, 0x7d, + 0x09, 0xe5, 0x58, 0xaa, 0xd9, 0xd8, 0x1f, 0x84, 0xdd, 0x41, 0x21, 0x8b, 0xdd, 0x39, 0x0c, 0x2f, + 0xb1, 0x8e, 0xb9, 0xc4, 0x4e, 0x57, 0xe7, 0x28, 0xd4, 0x22, 0xb0, 0x8b, 0x73, 0xfb, 0x72, 0x1c, + 0x86, 0x6a, 0xd1, 0xcc, 0x4c, 0x52, 0x93, 0x1a, 0xad, 0xc2, 0x0e, 0x53, 0x5a, 0xf5, 0x23, 0x9d, + 0x58, 0x1f, 0xa1, 0x33, 0x92, 0xfb, 0x26, 0x73, 0xbc, 0x4d, 0x5e, 0xcf, 0xb6, 0x2a, 0x2b, 0xd9, + 0x60, 0xa4, 0x7b, 0x28, 0x48, 0x3d, 0x74, 0xad, 0x80, 0x27, 0xa9, 0x6c, 0x30, 0x50, 0x90, 0x37, + 0x14, 0x60, 0xb9, 0xc9, 0x78, 0x27, 0x89, 0xc9, 0xdb, 0xd9, 0x44, 0x06, 0x67, 0x7b, 0xa1, 0x52, + 0x8e, 0xca, 0xa1, 0xc9, 0xa5, 0x3f, 0xcd, 0x26, 0xf2, 0x3e, 0x9d, 0x6f, 0x23, 0x0a, 0xf2, 0x2e, + 0x4c, 0xdc, 0x42, 0x89, 0x90, 0x36, 0x92, 0xd6, 0xa2, 0x20, 0xef, 0x33, 0x27, 0x82, 0xa6, 0xa9, + 0x75, 0x27, 0x62, 0xa6, 0x43, 0x4e, 0xe8, 0x0e, 0xd0, 0xfc, 0x85, 0x5d, 0x64, 0xc6, 0xb7, 0xa5, + 0x75, 0xda, 0xf4, 0xc9, 0x87, 0x60, 0x63, 0x86, 0x5b, 0x74, 0x4e, 0xaa, 0x16, 0xf9, 0x48, 0x6b, + 0x70, 0x50, 0x34, 0x82, 0x19, 0xde, 0x96, 0x3d, 0xf4, 0x11, 0x6b, 0x29, 0x74, 0x5d, 0xa9, 0x3a, + 0xe4, 0x53, 0x73, 0xfd, 0xcf, 0x72, 0xe3, 0xe7, 0xdf, 0xa6, 0x5f, 0xc0, 0x6f, 0xcb, 0xd9, 0xbf, + 0x5f, 0xff, 0x0b, 0x00, 0x00, 0xff, 0xff, 0xb9, 0x43, 0x6c, 0x84, 0xa8, 0x05, 0x00, 0x00, } diff --git a/protocol/protobuf/application_metadata_message.proto b/protocol/protobuf/application_metadata_message.proto index 9e1b60582fa..03127e2a03b 100644 --- a/protocol/protobuf/application_metadata_message.proto +++ b/protocol/protobuf/application_metadata_message.proto @@ -56,5 +56,6 @@ message ApplicationMetadataMessage { SYNC_BOOKMARK = 40; SYNC_CLEAR_HISTORY = 41; SYNC_SETTING = 42; + COMMUNITY_ARCHIVE_MAGNETLINK = 43; } } diff --git a/protocol/protobuf/communities.pb.go b/protocol/protobuf/communities.pb.go index 8c1ad094215..1513d3743fe 100644 --- a/protocol/protobuf/communities.pb.go +++ b/protocol/protobuf/communities.pb.go @@ -245,6 +245,7 @@ type CommunityDescription struct { Chats map[string]*CommunityChat `protobuf:"bytes,6,rep,name=chats,proto3" json:"chats,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` BanList []string `protobuf:"bytes,7,rep,name=ban_list,json=banList,proto3" json:"ban_list,omitempty"` Categories map[string]*CommunityCategory `protobuf:"bytes,8,rep,name=categories,proto3" json:"categories,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + MagnetlinkClock uint64 `protobuf:"varint,9,opt,name=magnetlink_clock,json=magnetlinkClock,proto3" json:"magnetlink_clock,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -324,6 +325,13 @@ func (m *CommunityDescription) GetCategories() map[string]*CommunityCategory { return nil } +func (m *CommunityDescription) GetMagnetlinkClock() uint64 { + if m != nil { + return m.MagnetlinkClock + } + return 0 +} + type CommunityChat struct { Members map[string]*CommunityMember `protobuf:"bytes,1,rep,name=members,proto3" 
json:"members,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` Permissions *CommunityPermissions `protobuf:"bytes,2,opt,name=permissions,proto3" json:"permissions,omitempty"` @@ -639,6 +647,360 @@ func (m *CommunityRequestToJoinResponse) GetGrant() []byte { return nil } +type CommunityMessageArchiveMagnetlink struct { + Clock uint64 `protobuf:"varint,1,opt,name=clock,proto3" json:"clock,omitempty"` + MagnetUri string `protobuf:"bytes,2,opt,name=magnet_uri,json=magnetUri,proto3" json:"magnet_uri,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CommunityMessageArchiveMagnetlink) Reset() { *m = CommunityMessageArchiveMagnetlink{} } +func (m *CommunityMessageArchiveMagnetlink) String() string { return proto.CompactTextString(m) } +func (*CommunityMessageArchiveMagnetlink) ProtoMessage() {} +func (*CommunityMessageArchiveMagnetlink) Descriptor() ([]byte, []int) { + return fileDescriptor_f937943d74c1cd8b, []int{9} +} + +func (m *CommunityMessageArchiveMagnetlink) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CommunityMessageArchiveMagnetlink.Unmarshal(m, b) +} +func (m *CommunityMessageArchiveMagnetlink) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CommunityMessageArchiveMagnetlink.Marshal(b, m, deterministic) +} +func (m *CommunityMessageArchiveMagnetlink) XXX_Merge(src proto.Message) { + xxx_messageInfo_CommunityMessageArchiveMagnetlink.Merge(m, src) +} +func (m *CommunityMessageArchiveMagnetlink) XXX_Size() int { + return xxx_messageInfo_CommunityMessageArchiveMagnetlink.Size(m) +} +func (m *CommunityMessageArchiveMagnetlink) XXX_DiscardUnknown() { + xxx_messageInfo_CommunityMessageArchiveMagnetlink.DiscardUnknown(m) +} + +var xxx_messageInfo_CommunityMessageArchiveMagnetlink proto.InternalMessageInfo + +func (m *CommunityMessageArchiveMagnetlink) GetClock() uint64 { + if m != nil { + return m.Clock + } + return 0 +} + +func (m *CommunityMessageArchiveMagnetlink) GetMagnetUri() string { + if m != nil { + return m.MagnetUri + } + return "" +} + +type WakuMessage struct { + Sig []byte `protobuf:"bytes,1,opt,name=sig,proto3" json:"sig,omitempty"` + Timestamp uint64 `protobuf:"varint,2,opt,name=timestamp,proto3" json:"timestamp,omitempty"` + Topic []byte `protobuf:"bytes,3,opt,name=topic,proto3" json:"topic,omitempty"` + Payload []byte `protobuf:"bytes,4,opt,name=payload,proto3" json:"payload,omitempty"` + Padding []byte `protobuf:"bytes,5,opt,name=padding,proto3" json:"padding,omitempty"` + Hash []byte `protobuf:"bytes,6,opt,name=hash,proto3" json:"hash,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WakuMessage) Reset() { *m = WakuMessage{} } +func (m *WakuMessage) String() string { return proto.CompactTextString(m) } +func (*WakuMessage) ProtoMessage() {} +func (*WakuMessage) Descriptor() ([]byte, []int) { + return fileDescriptor_f937943d74c1cd8b, []int{10} +} + +func (m *WakuMessage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WakuMessage.Unmarshal(m, b) +} +func (m *WakuMessage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WakuMessage.Marshal(b, m, deterministic) +} +func (m *WakuMessage) XXX_Merge(src proto.Message) { + xxx_messageInfo_WakuMessage.Merge(m, src) +} +func (m *WakuMessage) XXX_Size() int { + return xxx_messageInfo_WakuMessage.Size(m) +} +func (m 
*WakuMessage) XXX_DiscardUnknown() { + xxx_messageInfo_WakuMessage.DiscardUnknown(m) +} + +var xxx_messageInfo_WakuMessage proto.InternalMessageInfo + +func (m *WakuMessage) GetSig() []byte { + if m != nil { + return m.Sig + } + return nil +} + +func (m *WakuMessage) GetTimestamp() uint64 { + if m != nil { + return m.Timestamp + } + return 0 +} + +func (m *WakuMessage) GetTopic() []byte { + if m != nil { + return m.Topic + } + return nil +} + +func (m *WakuMessage) GetPayload() []byte { + if m != nil { + return m.Payload + } + return nil +} + +func (m *WakuMessage) GetPadding() []byte { + if m != nil { + return m.Padding + } + return nil +} + +func (m *WakuMessage) GetHash() []byte { + if m != nil { + return m.Hash + } + return nil +} + +type WakuMessageArchiveMetadata struct { + Version uint32 `protobuf:"varint,1,opt,name=version,proto3" json:"version,omitempty"` + From uint64 `protobuf:"varint,2,opt,name=from,proto3" json:"from,omitempty"` + To uint64 `protobuf:"varint,3,opt,name=to,proto3" json:"to,omitempty"` + ContentTopic [][]byte `protobuf:"bytes,4,rep,name=contentTopic,proto3" json:"contentTopic,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WakuMessageArchiveMetadata) Reset() { *m = WakuMessageArchiveMetadata{} } +func (m *WakuMessageArchiveMetadata) String() string { return proto.CompactTextString(m) } +func (*WakuMessageArchiveMetadata) ProtoMessage() {} +func (*WakuMessageArchiveMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_f937943d74c1cd8b, []int{11} +} + +func (m *WakuMessageArchiveMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WakuMessageArchiveMetadata.Unmarshal(m, b) +} +func (m *WakuMessageArchiveMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WakuMessageArchiveMetadata.Marshal(b, m, deterministic) +} +func (m *WakuMessageArchiveMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_WakuMessageArchiveMetadata.Merge(m, src) +} +func (m *WakuMessageArchiveMetadata) XXX_Size() int { + return xxx_messageInfo_WakuMessageArchiveMetadata.Size(m) +} +func (m *WakuMessageArchiveMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_WakuMessageArchiveMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_WakuMessageArchiveMetadata proto.InternalMessageInfo + +func (m *WakuMessageArchiveMetadata) GetVersion() uint32 { + if m != nil { + return m.Version + } + return 0 +} + +func (m *WakuMessageArchiveMetadata) GetFrom() uint64 { + if m != nil { + return m.From + } + return 0 +} + +func (m *WakuMessageArchiveMetadata) GetTo() uint64 { + if m != nil { + return m.To + } + return 0 +} + +func (m *WakuMessageArchiveMetadata) GetContentTopic() [][]byte { + if m != nil { + return m.ContentTopic + } + return nil +} + +type WakuMessageArchive struct { + Version uint32 `protobuf:"varint,1,opt,name=version,proto3" json:"version,omitempty"` + Metadata *WakuMessageArchiveMetadata `protobuf:"bytes,2,opt,name=metadata,proto3" json:"metadata,omitempty"` + Messages []*WakuMessage `protobuf:"bytes,3,rep,name=messages,proto3" json:"messages,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WakuMessageArchive) Reset() { *m = WakuMessageArchive{} } +func (m *WakuMessageArchive) String() string { return proto.CompactTextString(m) } +func (*WakuMessageArchive) ProtoMessage() {} +func (*WakuMessageArchive) Descriptor() ([]byte, []int) { + return 
fileDescriptor_f937943d74c1cd8b, []int{12} +} + +func (m *WakuMessageArchive) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WakuMessageArchive.Unmarshal(m, b) +} +func (m *WakuMessageArchive) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WakuMessageArchive.Marshal(b, m, deterministic) +} +func (m *WakuMessageArchive) XXX_Merge(src proto.Message) { + xxx_messageInfo_WakuMessageArchive.Merge(m, src) +} +func (m *WakuMessageArchive) XXX_Size() int { + return xxx_messageInfo_WakuMessageArchive.Size(m) +} +func (m *WakuMessageArchive) XXX_DiscardUnknown() { + xxx_messageInfo_WakuMessageArchive.DiscardUnknown(m) +} + +var xxx_messageInfo_WakuMessageArchive proto.InternalMessageInfo + +func (m *WakuMessageArchive) GetVersion() uint32 { + if m != nil { + return m.Version + } + return 0 +} + +func (m *WakuMessageArchive) GetMetadata() *WakuMessageArchiveMetadata { + if m != nil { + return m.Metadata + } + return nil +} + +func (m *WakuMessageArchive) GetMessages() []*WakuMessage { + if m != nil { + return m.Messages + } + return nil +} + +type WakuMessageArchiveIndexMetadata struct { + Version uint32 `protobuf:"varint,1,opt,name=version,proto3" json:"version,omitempty"` + Metadata *WakuMessageArchiveMetadata `protobuf:"bytes,2,opt,name=metadata,proto3" json:"metadata,omitempty"` + Offset uint64 `protobuf:"varint,3,opt,name=offset,proto3" json:"offset,omitempty"` + Size uint64 `protobuf:"varint,4,opt,name=size,proto3" json:"size,omitempty"` + Padding uint64 `protobuf:"varint,5,opt,name=padding,proto3" json:"padding,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WakuMessageArchiveIndexMetadata) Reset() { *m = WakuMessageArchiveIndexMetadata{} } +func (m *WakuMessageArchiveIndexMetadata) String() string { return proto.CompactTextString(m) } +func (*WakuMessageArchiveIndexMetadata) ProtoMessage() {} +func (*WakuMessageArchiveIndexMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_f937943d74c1cd8b, []int{13} +} + +func (m *WakuMessageArchiveIndexMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WakuMessageArchiveIndexMetadata.Unmarshal(m, b) +} +func (m *WakuMessageArchiveIndexMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WakuMessageArchiveIndexMetadata.Marshal(b, m, deterministic) +} +func (m *WakuMessageArchiveIndexMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_WakuMessageArchiveIndexMetadata.Merge(m, src) +} +func (m *WakuMessageArchiveIndexMetadata) XXX_Size() int { + return xxx_messageInfo_WakuMessageArchiveIndexMetadata.Size(m) +} +func (m *WakuMessageArchiveIndexMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_WakuMessageArchiveIndexMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_WakuMessageArchiveIndexMetadata proto.InternalMessageInfo + +func (m *WakuMessageArchiveIndexMetadata) GetVersion() uint32 { + if m != nil { + return m.Version + } + return 0 +} + +func (m *WakuMessageArchiveIndexMetadata) GetMetadata() *WakuMessageArchiveMetadata { + if m != nil { + return m.Metadata + } + return nil +} + +func (m *WakuMessageArchiveIndexMetadata) GetOffset() uint64 { + if m != nil { + return m.Offset + } + return 0 +} + +func (m *WakuMessageArchiveIndexMetadata) GetSize() uint64 { + if m != nil { + return m.Size + } + return 0 +} + +func (m *WakuMessageArchiveIndexMetadata) GetPadding() uint64 { + if m != nil { + return m.Padding + } + return 0 +} + +type 
WakuMessageArchiveIndex struct { + Archives map[string]*WakuMessageArchiveIndexMetadata `protobuf:"bytes,1,rep,name=archives,proto3" json:"archives,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WakuMessageArchiveIndex) Reset() { *m = WakuMessageArchiveIndex{} } +func (m *WakuMessageArchiveIndex) String() string { return proto.CompactTextString(m) } +func (*WakuMessageArchiveIndex) ProtoMessage() {} +func (*WakuMessageArchiveIndex) Descriptor() ([]byte, []int) { + return fileDescriptor_f937943d74c1cd8b, []int{14} +} + +func (m *WakuMessageArchiveIndex) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WakuMessageArchiveIndex.Unmarshal(m, b) +} +func (m *WakuMessageArchiveIndex) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WakuMessageArchiveIndex.Marshal(b, m, deterministic) +} +func (m *WakuMessageArchiveIndex) XXX_Merge(src proto.Message) { + xxx_messageInfo_WakuMessageArchiveIndex.Merge(m, src) +} +func (m *WakuMessageArchiveIndex) XXX_Size() int { + return xxx_messageInfo_WakuMessageArchiveIndex.Size(m) +} +func (m *WakuMessageArchiveIndex) XXX_DiscardUnknown() { + xxx_messageInfo_WakuMessageArchiveIndex.DiscardUnknown(m) +} + +var xxx_messageInfo_WakuMessageArchiveIndex proto.InternalMessageInfo + +func (m *WakuMessageArchiveIndex) GetArchives() map[string]*WakuMessageArchiveIndexMetadata { + if m != nil { + return m.Archives + } + return nil +} + func init() { proto.RegisterEnum("protobuf.CommunityMember_Roles", CommunityMember_Roles_name, CommunityMember_Roles_value) proto.RegisterEnum("protobuf.CommunityPermissions_Access", CommunityPermissions_Access_name, CommunityPermissions_Access_value) @@ -655,64 +1017,91 @@ func init() { proto.RegisterType((*CommunityInvitation)(nil), "protobuf.CommunityInvitation") proto.RegisterType((*CommunityRequestToJoin)(nil), "protobuf.CommunityRequestToJoin") proto.RegisterType((*CommunityRequestToJoinResponse)(nil), "protobuf.CommunityRequestToJoinResponse") + proto.RegisterType((*CommunityMessageArchiveMagnetlink)(nil), "protobuf.CommunityMessageArchiveMagnetlink") + proto.RegisterType((*WakuMessage)(nil), "protobuf.WakuMessage") + proto.RegisterType((*WakuMessageArchiveMetadata)(nil), "protobuf.WakuMessageArchiveMetadata") + proto.RegisterType((*WakuMessageArchive)(nil), "protobuf.WakuMessageArchive") + proto.RegisterType((*WakuMessageArchiveIndexMetadata)(nil), "protobuf.WakuMessageArchiveIndexMetadata") + proto.RegisterType((*WakuMessageArchiveIndex)(nil), "protobuf.WakuMessageArchiveIndex") + proto.RegisterMapType((map[string]*WakuMessageArchiveIndexMetadata)(nil), "protobuf.WakuMessageArchiveIndex.ArchivesEntry") } func init() { proto.RegisterFile("communities.proto", fileDescriptor_f937943d74c1cd8b) } var fileDescriptor_f937943d74c1cd8b = []byte{ - // 859 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x55, 0x6d, 0x8f, 0xdb, 0x44, - 0x10, 0xee, 0x26, 0x71, 0xe2, 0x4c, 0x72, 0x77, 0xbe, 0xbd, 0x6b, 0xeb, 0x5e, 0x45, 0x1b, 0x2c, - 0x90, 0x82, 0x10, 0xa9, 0x48, 0x85, 0x84, 0x78, 0x29, 0xa4, 0x87, 0x55, 0x4c, 0x73, 0x4e, 0xbb, - 0xc9, 0x81, 0xe8, 0x17, 0xcb, 0x71, 0x96, 0xb2, 0x6a, 0x62, 0x1b, 0xaf, 0x73, 0x52, 0x7e, 0x00, - 0x12, 0x3f, 0x01, 0x89, 0xef, 0xfc, 0x27, 0xbe, 0xf1, 0x53, 0xd0, 0xee, 0xc6, 0x2f, 0xc9, 0x25, - 0x6d, 0x25, 0xd4, 0x4f, 0xf1, 0xec, 0xee, 
0x3c, 0xf3, 0xcc, 0x33, 0x93, 0x19, 0x38, 0x0e, 0xa2, - 0xc5, 0x62, 0x19, 0xb2, 0x94, 0x51, 0xde, 0x8b, 0x93, 0x28, 0x8d, 0xb0, 0x2e, 0x7f, 0xa6, 0xcb, - 0x5f, 0xce, 0x4e, 0x82, 0x5f, 0xfd, 0xd4, 0x63, 0x33, 0x1a, 0xa6, 0x2c, 0x5d, 0xa9, 0x6b, 0xeb, - 0x0a, 0xb4, 0x27, 0x89, 0x1f, 0xa6, 0xf8, 0x7d, 0x68, 0x67, 0xce, 0x2b, 0x8f, 0xcd, 0x4c, 0xd4, - 0x41, 0xdd, 0x36, 0x69, 0xe5, 0x67, 0xce, 0x0c, 0xdf, 0x85, 0xe6, 0x82, 0x2e, 0xa6, 0x34, 0x11, - 0xf7, 0x15, 0x79, 0xaf, 0xab, 0x03, 0x67, 0x86, 0x6f, 0x43, 0x63, 0x8d, 0x6f, 0x56, 0x3b, 0xa8, - 0xdb, 0x24, 0x75, 0x61, 0x3a, 0x33, 0x7c, 0x0a, 0x5a, 0x30, 0x8f, 0x82, 0x57, 0x66, 0xad, 0x83, - 0xba, 0x35, 0xa2, 0x0c, 0xeb, 0x0f, 0x04, 0x47, 0xe7, 0x19, 0xf6, 0x85, 0x04, 0xc1, 0x9f, 0x81, - 0x96, 0x44, 0x73, 0xca, 0x4d, 0xd4, 0xa9, 0x76, 0x0f, 0xfb, 0xf7, 0x7b, 0x19, 0xf5, 0xde, 0xd6, - 0xcb, 0x1e, 0x11, 0xcf, 0x88, 0x7a, 0x6d, 0x3d, 0x02, 0x4d, 0xda, 0xd8, 0x80, 0xf6, 0xa5, 0xfb, - 0xd4, 0x1d, 0xfd, 0xe4, 0x7a, 0x64, 0x34, 0xb4, 0x8d, 0x1b, 0xb8, 0x0d, 0xba, 0xf8, 0xf2, 0x06, - 0xc3, 0xa1, 0x81, 0xf0, 0x4d, 0x38, 0x96, 0xd6, 0xc5, 0xc0, 0x1d, 0x3c, 0xb1, 0xbd, 0xcb, 0xb1, - 0x4d, 0xc6, 0x46, 0xc5, 0xfa, 0x17, 0xc1, 0x69, 0x1e, 0xe0, 0x19, 0x4d, 0x16, 0x8c, 0x73, 0x16, - 0x85, 0x1c, 0xdf, 0x01, 0x9d, 0x86, 0xdc, 0x8b, 0xc2, 0xf9, 0x4a, 0xca, 0xa1, 0x93, 0x06, 0x0d, - 0xf9, 0x28, 0x9c, 0xaf, 0xb0, 0x09, 0x8d, 0x38, 0x61, 0x57, 0x7e, 0x4a, 0xa5, 0x10, 0x3a, 0xc9, - 0x4c, 0xfc, 0x35, 0xd4, 0xfd, 0x20, 0xa0, 0x9c, 0x4b, 0x19, 0x0e, 0xfb, 0x1f, 0xee, 0xc8, 0xa2, - 0x14, 0xa4, 0x37, 0x90, 0x8f, 0xc9, 0xda, 0xc9, 0x9a, 0x40, 0x5d, 0x9d, 0x60, 0x0c, 0x87, 0x59, - 0x36, 0x83, 0xf3, 0x73, 0x7b, 0x3c, 0x36, 0x6e, 0xe0, 0x63, 0x38, 0x70, 0x47, 0xde, 0x85, 0x7d, - 0xf1, 0xd8, 0x26, 0xe3, 0xef, 0x9d, 0x67, 0x06, 0xc2, 0x27, 0x70, 0xe4, 0xb8, 0x3f, 0x3a, 0x93, - 0xc1, 0xc4, 0x19, 0xb9, 0xde, 0xc8, 0x1d, 0xfe, 0x6c, 0x54, 0xf0, 0x21, 0xc0, 0xc8, 0xf5, 0x88, - 0xfd, 0xfc, 0xd2, 0x1e, 0x4f, 0x8c, 0xaa, 0xf5, 0x97, 0x56, 0x4a, 0xf1, 0x3b, 0xca, 0x83, 0x84, - 0xc5, 0x29, 0x8b, 0xc2, 0xa2, 0x38, 0xa8, 0x54, 0x1c, 0x6c, 0x43, 0x43, 0xd5, 0x95, 0x9b, 0x95, - 0x4e, 0xb5, 0xdb, 0xea, 0x7f, 0xbc, 0x23, 0x89, 0x12, 0x4c, 0x4f, 0x95, 0x85, 0xdb, 0x61, 0x9a, - 0xac, 0x48, 0xe6, 0x8b, 0xbf, 0x85, 0x56, 0x5c, 0x64, 0x2a, 0xf5, 0x68, 0xf5, 0xef, 0xbd, 0x5e, - 0x0f, 0x52, 0x76, 0xc1, 0x7d, 0xd0, 0xb3, 0x7e, 0x35, 0x35, 0xe9, 0x7e, 0xab, 0xe4, 0x2e, 0xfb, - 0x4b, 0xdd, 0x92, 0xfc, 0x1d, 0xfe, 0x06, 0x34, 0xd1, 0x79, 0xdc, 0xac, 0x4b, 0xea, 0x1f, 0xbd, - 0x81, 0xba, 0x40, 0x59, 0x13, 0x57, 0x7e, 0xa2, 0xec, 0x53, 0x3f, 0xf4, 0xe6, 0x8c, 0xa7, 0x66, - 0xa3, 0x53, 0xed, 0x36, 0x49, 0x63, 0xea, 0x87, 0x43, 0xc6, 0x53, 0xec, 0x02, 0x04, 0x7e, 0x4a, - 0x5f, 0x46, 0x09, 0xa3, 0xdc, 0xd4, 0x65, 0x80, 0xde, 0x9b, 0x02, 0xe4, 0x0e, 0x2a, 0x4a, 0x09, - 0xe1, 0xec, 0x12, 0xda, 0x65, 0xe9, 0xb0, 0x01, 0xd5, 0x57, 0x54, 0x35, 0x5b, 0x93, 0x88, 0x4f, - 0xfc, 0x00, 0xb4, 0x2b, 0x7f, 0xbe, 0x54, 0x6d, 0xd6, 0xea, 0xdf, 0xd9, 0xfb, 0x9f, 0x20, 0xea, - 0xdd, 0x17, 0x95, 0xcf, 0xd1, 0xd9, 0x73, 0x80, 0x22, 0xad, 0x1d, 0xa0, 0x9f, 0x6c, 0x82, 0xde, - 0xde, 0x01, 0x2a, 0xfc, 0xcb, 0x90, 0x2f, 0xe0, 0x68, 0x2b, 0x91, 0x1d, 0xb8, 0x9f, 0x6e, 0xe2, - 0xde, 0xdd, 0x85, 0xab, 0x40, 0x56, 0x25, 0x6c, 0xeb, 0x9f, 0x0a, 0x1c, 0x6c, 0x04, 0xc6, 0x8f, - 0x8a, 0x06, 0x44, 0x52, 0xe4, 0x0f, 0xf6, 0x50, 0x7c, 0xbb, 0xce, 0xab, 0xfc, 0xbf, 0xce, 0xab, - 0xbe, 0x65, 0xe7, 0xdd, 0x87, 0xd6, 0xba, 0xb6, 0x72, 0x82, 0xd6, 0xa4, 0x30, 0x59, 0xb9, 0xc5, - 0x00, 0x3d, 0x03, 0x3d, 0x8e, 0x38, 0x13, 0x6d, 0x21, 0xdb, 0x59, 
0x23, 0xb9, 0xfd, 0x8e, 0x5a, - 0xc1, 0x9a, 0xc1, 0xf1, 0x35, 0xed, 0xb7, 0x89, 0xa2, 0x6b, 0x44, 0x31, 0xd4, 0x42, 0x7f, 0xa1, - 0x22, 0x35, 0x89, 0xfc, 0xde, 0x20, 0x5f, 0xdd, 0x24, 0x6f, 0xfd, 0x89, 0xe0, 0x24, 0x0f, 0xe3, - 0x84, 0x57, 0x2c, 0xf5, 0xe5, 0x78, 0x79, 0x08, 0x37, 0x8b, 0xa5, 0x32, 0x2b, 0xfe, 0x14, 0xeb, - 0xed, 0x72, 0x1a, 0xec, 0x99, 0x49, 0x2f, 0xc5, 0x4a, 0x5a, 0xaf, 0x18, 0x65, 0xec, 0xdf, 0x2f, - 0xef, 0x01, 0xc4, 0xcb, 0xe9, 0x9c, 0x05, 0x9e, 0xd0, 0xab, 0x26, 0x7d, 0x9a, 0xea, 0xe4, 0x29, - 0x5d, 0x59, 0xbf, 0x23, 0xb8, 0x95, 0x53, 0x23, 0xf4, 0xb7, 0x25, 0xe5, 0xe9, 0x24, 0xfa, 0x21, - 0x62, 0xfb, 0x86, 0xdf, 0x7a, 0xea, 0x97, 0xf2, 0x17, 0x53, 0xdf, 0x15, 0x12, 0xec, 0xe5, 0xb0, - 0xbd, 0x3c, 0x6b, 0xd7, 0x96, 0xa7, 0xf5, 0x37, 0x82, 0x7b, 0xbb, 0x79, 0x10, 0xca, 0xe3, 0x28, - 0xe4, 0x74, 0x0f, 0x9f, 0xaf, 0xa0, 0x99, 0xe3, 0xbc, 0xa6, 0x93, 0x4b, 0x0a, 0x92, 0xc2, 0x41, - 0x54, 0x4d, 0x6c, 0x96, 0x38, 0xa5, 0x8a, 0xb3, 0x4e, 0x72, 0xbb, 0x10, 0xba, 0x56, 0x12, 0xfa, - 0xf1, 0xc1, 0x8b, 0x56, 0xef, 0xc1, 0x97, 0x59, 0x80, 0x69, 0x5d, 0x7e, 0x3d, 0xfc, 0x2f, 0x00, - 0x00, 0xff, 0xff, 0x49, 0xd3, 0x7e, 0x3e, 0x5b, 0x08, 0x00, 0x00, + // 1181 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x56, 0x5b, 0x6f, 0x1a, 0x47, + 0x14, 0xce, 0x72, 0x5d, 0x0e, 0xd8, 0xc6, 0x13, 0x5f, 0x08, 0x69, 0x62, 0xb2, 0x6a, 0x25, 0xac, + 0xaa, 0x58, 0x21, 0xaa, 0x54, 0xf5, 0x92, 0x84, 0xb8, 0x28, 0xa5, 0xb6, 0x21, 0x19, 0x70, 0xd3, + 0xe6, 0x65, 0xb5, 0x2c, 0x63, 0x3c, 0x32, 0xec, 0xd2, 0x9d, 0x01, 0x95, 0x3e, 0xf4, 0xad, 0x52, + 0x7f, 0x42, 0xa5, 0x3e, 0x56, 0xea, 0x1f, 0xa9, 0xd4, 0xc7, 0xbe, 0xf7, 0xad, 0x3f, 0xa5, 0x9a, + 0x99, 0xbd, 0x61, 0x83, 0x6d, 0x29, 0xea, 0x13, 0x73, 0x66, 0xe6, 0x7c, 0xf3, 0x9d, 0x73, 0xbe, + 0x3d, 0x07, 0xd8, 0xb4, 0xdd, 0xf1, 0x78, 0xea, 0x50, 0x4e, 0x09, 0xab, 0x4d, 0x3c, 0x97, 0xbb, + 0x48, 0x97, 0x3f, 0xfd, 0xe9, 0x59, 0xf9, 0xae, 0x7d, 0x6e, 0x71, 0x93, 0x0e, 0x88, 0xc3, 0x29, + 0x9f, 0xab, 0x63, 0x63, 0x06, 0xe9, 0x97, 0x9e, 0xe5, 0x70, 0xf4, 0x08, 0x0a, 0x81, 0xf3, 0xdc, + 0xa4, 0x83, 0x92, 0x56, 0xd1, 0xaa, 0x05, 0x9c, 0x0f, 0xf7, 0x5a, 0x03, 0x74, 0x1f, 0x72, 0x63, + 0x32, 0xee, 0x13, 0x4f, 0x9c, 0x27, 0xe4, 0xb9, 0xae, 0x36, 0x5a, 0x03, 0xb4, 0x0b, 0x59, 0x1f, + 0xbf, 0x94, 0xac, 0x68, 0xd5, 0x1c, 0xce, 0x08, 0xb3, 0x35, 0x40, 0x5b, 0x90, 0xb6, 0x47, 0xae, + 0x7d, 0x51, 0x4a, 0x55, 0xb4, 0x6a, 0x0a, 0x2b, 0xc3, 0xf8, 0x45, 0x83, 0x8d, 0xc3, 0x00, 0xfb, + 0x44, 0x82, 0xa0, 0x8f, 0x21, 0xed, 0xb9, 0x23, 0xc2, 0x4a, 0x5a, 0x25, 0x59, 0x5d, 0xaf, 0xef, + 0xd5, 0x02, 0xea, 0xb5, 0x4b, 0x37, 0x6b, 0x58, 0x5c, 0xc3, 0xea, 0xb6, 0xf1, 0x14, 0xd2, 0xd2, + 0x46, 0x45, 0x28, 0x9c, 0xb6, 0x8f, 0xda, 0x9d, 0x37, 0x6d, 0x13, 0x77, 0x8e, 0x9b, 0xc5, 0x3b, + 0xa8, 0x00, 0xba, 0x58, 0x99, 0x8d, 0xe3, 0xe3, 0xa2, 0x86, 0xb6, 0x61, 0x53, 0x5a, 0x27, 0x8d, + 0x76, 0xe3, 0x65, 0xd3, 0x3c, 0xed, 0x36, 0x71, 0xb7, 0x98, 0x30, 0xfe, 0xd5, 0x60, 0x2b, 0x7c, + 0xe0, 0x15, 0xf1, 0xc6, 0x94, 0x31, 0xea, 0x3a, 0x0c, 0xdd, 0x03, 0x9d, 0x38, 0xcc, 0x74, 0x9d, + 0xd1, 0x5c, 0xa6, 0x43, 0xc7, 0x59, 0xe2, 0xb0, 0x8e, 0x33, 0x9a, 0xa3, 0x12, 0x64, 0x27, 0x1e, + 0x9d, 0x59, 0x9c, 0xc8, 0x44, 0xe8, 0x38, 0x30, 0xd1, 0x17, 0x90, 0xb1, 0x6c, 0x9b, 0x30, 0x26, + 0xd3, 0xb0, 0x5e, 0xff, 0x60, 0x49, 0x14, 0xb1, 0x47, 0x6a, 0x0d, 0x79, 0x19, 0xfb, 0x4e, 0x46, + 0x0f, 0x32, 0x6a, 0x07, 0x21, 0x58, 0x0f, 0xa2, 0x69, 0x1c, 0x1e, 0x36, 0xbb, 0xdd, 0xe2, 0x1d, + 0xb4, 0x09, 0x6b, 0xed, 0x8e, 0x79, 0xd2, 0x3c, 0x79, 0xd1, 0xc4, 0xdd, 
0xaf, 0x5a, 0xaf, 0x8a, + 0x1a, 0xba, 0x0b, 0x1b, 0xad, 0xf6, 0x37, 0xad, 0x5e, 0xa3, 0xd7, 0xea, 0xb4, 0xcd, 0x4e, 0xfb, + 0xf8, 0xbb, 0x62, 0x02, 0xad, 0x03, 0x74, 0xda, 0x26, 0x6e, 0xbe, 0x3e, 0x6d, 0x76, 0x7b, 0xc5, + 0xa4, 0xf1, 0x57, 0x3a, 0x16, 0xe2, 0x97, 0x84, 0xd9, 0x1e, 0x9d, 0x70, 0xea, 0x3a, 0x51, 0x71, + 0xb4, 0x58, 0x71, 0x50, 0x13, 0xb2, 0xaa, 0xae, 0xac, 0x94, 0xa8, 0x24, 0xab, 0xf9, 0xfa, 0x87, + 0x4b, 0x82, 0x88, 0xc1, 0xd4, 0x54, 0x59, 0x58, 0xd3, 0xe1, 0xde, 0x1c, 0x07, 0xbe, 0xe8, 0x39, + 0xe4, 0x27, 0x51, 0xa4, 0x32, 0x1f, 0xf9, 0xfa, 0xc3, 0xeb, 0xf3, 0x81, 0xe3, 0x2e, 0xa8, 0x0e, + 0x7a, 0xa0, 0xd7, 0x52, 0x5a, 0xba, 0xef, 0xc4, 0xdc, 0xa5, 0xbe, 0xd4, 0x29, 0x0e, 0xef, 0xa1, + 0x67, 0x90, 0x16, 0xca, 0x63, 0xa5, 0x8c, 0xa4, 0xbe, 0x7f, 0x03, 0x75, 0x81, 0xe2, 0x13, 0x57, + 0x7e, 0xa2, 0xec, 0x7d, 0xcb, 0x31, 0x47, 0x94, 0xf1, 0x52, 0xb6, 0x92, 0xac, 0xe6, 0x70, 0xb6, + 0x6f, 0x39, 0xc7, 0x94, 0x71, 0xd4, 0x06, 0xb0, 0x2d, 0x4e, 0x86, 0xae, 0x47, 0x09, 0x2b, 0xe9, + 0xf2, 0x81, 0xda, 0x4d, 0x0f, 0x84, 0x0e, 0xea, 0x95, 0x18, 0x02, 0xda, 0x87, 0xe2, 0xd8, 0x1a, + 0x3a, 0x84, 0x8f, 0xa8, 0x73, 0x61, 0xaa, 0x4a, 0xe4, 0x64, 0x25, 0x36, 0xa2, 0xfd, 0x43, 0xb1, + 0x5d, 0x3e, 0x85, 0x42, 0x3c, 0xcb, 0xa8, 0x08, 0xc9, 0x0b, 0xa2, 0x74, 0x99, 0xc3, 0x62, 0x89, + 0x0e, 0x20, 0x3d, 0xb3, 0x46, 0x53, 0xa5, 0xc8, 0x7c, 0xfd, 0xde, 0xca, 0xcf, 0x07, 0xab, 0x7b, + 0x9f, 0x26, 0x3e, 0xd1, 0xca, 0xaf, 0x01, 0xa2, 0x0c, 0x2c, 0x01, 0xfd, 0x68, 0x11, 0x74, 0x77, + 0x09, 0xa8, 0xf0, 0x8f, 0x43, 0xbe, 0x85, 0x8d, 0x4b, 0x31, 0x2f, 0xc1, 0x7d, 0xbc, 0x88, 0x7b, + 0x7f, 0x19, 0xae, 0x02, 0x99, 0xc7, 0xb0, 0x8d, 0x7f, 0x12, 0xb0, 0xb6, 0xf0, 0x30, 0x7a, 0x1a, + 0x69, 0x55, 0x93, 0xf5, 0x78, 0x7f, 0x05, 0xc5, 0xdb, 0x89, 0x34, 0xf1, 0x6e, 0x22, 0x4d, 0xde, + 0x52, 0xa4, 0x7b, 0x90, 0xf7, 0x65, 0x20, 0x9b, 0x6d, 0x4a, 0x26, 0x26, 0x50, 0x86, 0xe8, 0xb5, + 0x65, 0xd0, 0x27, 0x2e, 0xa3, 0x42, 0x41, 0x52, 0xf9, 0x69, 0x1c, 0xda, 0xff, 0x93, 0x14, 0x8c, + 0x01, 0x6c, 0x5e, 0xc9, 0xfd, 0x65, 0xa2, 0xda, 0x15, 0xa2, 0x08, 0x52, 0x8e, 0x35, 0x56, 0x2f, + 0xe5, 0xb0, 0x5c, 0x2f, 0x90, 0x4f, 0x2e, 0x92, 0x37, 0x7e, 0xd5, 0xe0, 0x6e, 0xf8, 0x4c, 0xcb, + 0x99, 0x51, 0x6e, 0xc9, 0x4e, 0xf4, 0x04, 0xb6, 0xa3, 0xf9, 0x33, 0x88, 0xbe, 0x1f, 0x7f, 0x10, + 0x6d, 0xd9, 0x2b, 0xda, 0xd7, 0x50, 0x4c, 0x2f, 0x7f, 0x1a, 0x29, 0x63, 0xf5, 0x28, 0x7a, 0x00, + 0x30, 0x99, 0xf6, 0x47, 0xd4, 0x36, 0x45, 0xbe, 0x52, 0xd2, 0x27, 0xa7, 0x76, 0x8e, 0xc8, 0xdc, + 0xf8, 0x59, 0x83, 0x9d, 0x90, 0x1a, 0x26, 0xdf, 0x4f, 0x09, 0xe3, 0x3d, 0xf7, 0x6b, 0x97, 0xae, + 0xea, 0x93, 0xfe, 0x80, 0x88, 0xc5, 0x2f, 0x06, 0x44, 0x5b, 0xa4, 0x60, 0x25, 0x87, 0xcb, 0x73, + 0x36, 0x75, 0x65, 0xce, 0x1a, 0x7f, 0x68, 0xf0, 0x70, 0x39, 0x0f, 0x4c, 0xd8, 0xc4, 0x75, 0x18, + 0x59, 0xc1, 0xe7, 0x73, 0xc8, 0x85, 0x38, 0xd7, 0x28, 0x39, 0x96, 0x41, 0x1c, 0x39, 0x88, 0xaa, + 0x89, 0x21, 0x34, 0xe1, 0x44, 0x71, 0xd6, 0x71, 0x68, 0x47, 0x89, 0x4e, 0xc5, 0x12, 0x6d, 0x7c, + 0x0b, 0x8f, 0x62, 0x7a, 0x62, 0xcc, 0x1a, 0x92, 0x86, 0x67, 0x9f, 0xd3, 0x19, 0x39, 0x09, 0xbb, + 0xd7, 0x0a, 0xaa, 0x0f, 0x00, 0x54, 0x87, 0x33, 0xa7, 0x1e, 0xf5, 0x93, 0x97, 0x53, 0x3b, 0xa7, + 0x1e, 0x35, 0x7e, 0xd3, 0x20, 0xff, 0xc6, 0xba, 0x98, 0xfa, 0xa8, 0x42, 0xe2, 0x8c, 0x0e, 0x7d, + 0x2d, 0x88, 0x25, 0x7a, 0x0f, 0x72, 0x9c, 0x8e, 0x09, 0xe3, 0xd6, 0x78, 0x22, 0xfd, 0x53, 0x38, + 0xda, 0x10, 0x8f, 0x72, 0x77, 0x42, 0x6d, 0x19, 0x48, 0x01, 0x2b, 0x43, 0x4e, 0x6d, 0x6b, 0x3e, + 0x72, 0xad, 0x20, 0xed, 0x81, 0xa9, 0x4e, 0x06, 0x03, 0xea, 0x0c, 0xe5, 0xd7, 0x26, 0x4f, 0xa4, + 
0x29, 0xf4, 0x7d, 0x6e, 0xb1, 0xf3, 0x52, 0x46, 0x6e, 0xcb, 0xb5, 0xf1, 0x13, 0x94, 0x63, 0xe4, + 0x82, 0x90, 0x09, 0xb7, 0x06, 0x16, 0xb7, 0x04, 0xd6, 0x8c, 0x78, 0x2c, 0xd0, 0xee, 0x1a, 0x0e, + 0x4c, 0x81, 0x75, 0xe6, 0xb9, 0x63, 0x9f, 0xae, 0x5c, 0xa3, 0x75, 0x48, 0x70, 0x57, 0xd2, 0x4c, + 0xe1, 0x04, 0x77, 0x91, 0x21, 0xf4, 0xe1, 0x70, 0xe2, 0xf0, 0x9e, 0x0c, 0x20, 0x55, 0x49, 0x56, + 0x0b, 0x78, 0x61, 0xcf, 0xf8, 0x5d, 0x03, 0x74, 0x95, 0xc0, 0x35, 0x0f, 0x3f, 0x07, 0x7d, 0xec, + 0xd3, 0xf3, 0x75, 0x11, 0xeb, 0x92, 0xab, 0x43, 0xc1, 0xa1, 0x17, 0x7a, 0x2c, 0x10, 0xe4, 0x1d, + 0x31, 0xc8, 0x45, 0x9f, 0xdd, 0x5e, 0x8a, 0x80, 0xc3, 0x6b, 0xc6, 0x9f, 0x1a, 0xec, 0x5d, 0xc5, + 0x6e, 0x39, 0x03, 0xf2, 0xc3, 0x2d, 0x72, 0xf5, 0xee, 0x94, 0x77, 0x20, 0xe3, 0x9e, 0x9d, 0x31, + 0xc2, 0xfd, 0xec, 0xfa, 0x96, 0xa8, 0x02, 0xa3, 0x3f, 0x12, 0xff, 0xff, 0xa8, 0x5c, 0x5f, 0xae, + 0x7f, 0x2a, 0xac, 0xbf, 0xf1, 0xb7, 0x06, 0xbb, 0x2b, 0xa2, 0x40, 0x47, 0xa0, 0x5b, 0xca, 0x0e, + 0x86, 0xcf, 0xc1, 0x75, 0x1c, 0xa5, 0x53, 0xcd, 0x37, 0xfc, 0x39, 0x14, 0x02, 0x94, 0xcf, 0x60, + 0x6d, 0xe1, 0x68, 0x49, 0x5b, 0x7f, 0xb6, 0xd8, 0xd6, 0xf7, 0x6f, 0x7c, 0x2c, 0xcc, 0x4a, 0xd4, + 0xe6, 0x5f, 0xac, 0xbd, 0xcd, 0xd7, 0x0e, 0x3e, 0x0b, 0x3c, 0xfb, 0x19, 0xb9, 0x7a, 0xf2, 0x5f, + 0x00, 0x00, 0x00, 0xff, 0xff, 0xed, 0x69, 0x52, 0xe4, 0x3b, 0x0c, 0x00, 0x00, } diff --git a/protocol/protobuf/communities.proto b/protocol/protobuf/communities.proto index bc5a7c6db4c..051924c453d 100644 --- a/protocol/protobuf/communities.proto +++ b/protocol/protobuf/communities.proto @@ -43,6 +43,7 @@ message CommunityDescription { map chats = 6; repeated string ban_list = 7; map categories = 8; + uint64 magnetlink_clock = 9; } message CommunityChat { @@ -79,3 +80,42 @@ message CommunityRequestToJoinResponse { bool accepted = 3; bytes grant = 4; } + +message CommunityMessageArchiveMagnetlink { + uint64 clock = 1; + string magnet_uri = 2; +} + +message WakuMessage { + bytes sig = 1; + uint64 timestamp = 2; + bytes topic = 3; + bytes payload = 4; + bytes padding = 5; + bytes hash = 6; +} + +message WakuMessageArchiveMetadata { + uint32 version = 1; + uint64 from = 2; + uint64 to = 3; + repeated bytes contentTopic = 4; +} + +message WakuMessageArchive { + uint32 version = 1; + WakuMessageArchiveMetadata metadata = 2; + repeated WakuMessage messages = 3; +} + +message WakuMessageArchiveIndexMetadata { + uint32 version = 1; + WakuMessageArchiveMetadata metadata = 2; + uint64 offset = 3; + uint64 size = 4; + uint64 padding = 5; +} + +message WakuMessageArchiveIndex { + map archives = 1; +} diff --git a/services/ext/api.go b/services/ext/api.go index ac332f66b5b..6be440c6211 100644 --- a/services/ext/api.go +++ b/services/ext/api.go @@ -928,6 +928,14 @@ func (api *PublicAPI) GetCommunitiesSettings() ([]communities.CommunitySettings, return api.service.messenger.GetCommunitiesSettings() } +func (api *PublicAPI) EnableCommunityHistoryArchiveProtocol() error { + return api.service.messenger.EnableCommunityHistoryArchiveProtocol() +} + +func (api *PublicAPI) DisableCommunityHistoryArchiveProtocol() error { + return api.service.messenger.DisableCommunityHistoryArchiveProtocol() +} + func (api *PublicAPI) AddStorePeer(address string) (string, error) { return api.service.messenger.AddStorePeer(address) } diff --git a/services/ext/signal.go b/services/ext/signal.go index 857ceb376e0..d9d6f755736 100644 --- a/services/ext/signal.go +++ b/services/ext/signal.go @@ -86,3 +86,31 @@ func (m *MessengerSignalsHandler) HistoryRequestFailed(requestID string, 
err error) {
func (m *MessengerSignalsHandler) HistoryRequestCompleted(requestID string) {
	signal.SendHistoricMessagesRequestCompleted(requestID)
}
+
+func (m *MessengerSignalsHandler) HistoryArchivesProtocolEnabled() {
+	signal.SendHistoryArchivesProtocolEnabled()
+}
+
+func (m *MessengerSignalsHandler) HistoryArchivesProtocolDisabled() {
+	signal.SendHistoryArchivesProtocolDisabled()
+}
+
+func (m *MessengerSignalsHandler) CreatingHistoryArchives(communityID string) {
+	signal.SendCreatingHistoryArchives(communityID)
+}
+
+func (m *MessengerSignalsHandler) NoHistoryArchivesCreated(communityID string, from int, to int) {
+	signal.SendNoHistoryArchivesCreated(communityID, from, to)
+}
+
+func (m *MessengerSignalsHandler) HistoryArchivesCreated(communityID string, from int, to int) {
+	signal.SendHistoryArchivesCreated(communityID, from, to)
+}
+
+func (m *MessengerSignalsHandler) HistoryArchivesSeeding(communityID string) {
+	signal.SendHistoryArchivesSeeding(communityID)
+}
+
+func (m *MessengerSignalsHandler) HistoryArchivesUnseeded(communityID string) {
+	signal.SendHistoryArchivesUnseeded(communityID)
+}
diff --git a/signal/events_community_archives.go b/signal/events_community_archives.go
new file mode 100644
index 00000000000..e9db8015f4c
--- /dev/null
+++ b/signal/events_community_archives.go
@@ -0,0 +1,87 @@
+package signal
+
+const (
+
+	// EventHistoryArchivesProtocolEnabled is triggered when the community history
+	// archive protocol is enabled via the RPC API
+	EventHistoryArchivesProtocolEnabled = "community.historyArchivesProtocolEnabled"
+	// EventHistoryArchivesProtocolDisabled is triggered when the community history
+	// archive protocol is disabled via the RPC API
+	EventHistoryArchivesProtocolDisabled = "community.historyArchivesProtocolDisabled"
+	// EventCreatingHistoryArchives is triggered when the community owner node
+	// starts creating archive torrents
+	EventCreatingHistoryArchives = "community.creatingHistoryArchives"
+	// EventHistoryArchivesCreated is triggered when the community owner node
+	// has finished creating archive torrents
+	EventHistoryArchivesCreated = "community.historyArchivesCreated"
+	// EventNoHistoryArchivesCreated is triggered when the community owner node
+	// tried to create archives but did not because there were no new messages
+	// to archive
+	EventNoHistoryArchivesCreated = "community.noHistoryArchivesCreated"
+	// EventHistoryArchivesSeeding is triggered when the community owner node
+	// starts seeding archive torrents
+	EventHistoryArchivesSeeding = "community.historyArchivesSeeding"
+	// EventHistoryArchivesUnseeded is triggered when the community owner node
+	// drops the torrent for a particular community
+	EventHistoryArchivesUnseeded = "community.historyArchivesUnseeded"
+)
+
+type CreatingHistoryArchivesSignal struct {
+	CommunityID string `json:"communityId"`
+}
+
+type NoHistoryArchivesCreatedSignal struct {
+	CommunityID string `json:"communityId"`
+	From        int    `json:"from"`
+	To          int    `json:"to"`
+}
+
+type HistoryArchivesCreatedSignal struct {
+	CommunityID string `json:"communityId"`
+	From        int    `json:"from"`
+	To          int    `json:"to"`
+}
+
+type HistoryArchivesSeedingSignal struct {
+	CommunityID string `json:"communityId"`
+}
+
+type HistoryArchivesUnseededSignal struct {
+	CommunityID string `json:"communityId"`
+}
+
+func SendHistoryArchivesProtocolEnabled() {
+	send(EventHistoryArchivesProtocolEnabled, nil)
+}
+
+func SendHistoryArchivesProtocolDisabled() {
+	send(EventHistoryArchivesProtocolDisabled, nil)
+}
+
+func 
SendCreatingHistoryArchives(communityID string) { + send(EventCreatingHistoryArchives, CreatingHistoryArchivesSignal{CommunityID: communityID}) +} + +func SendNoHistoryArchivesCreated(communityID string, from int, to int) { + send(EventNoHistoryArchivesCreated, NoHistoryArchivesCreatedSignal{ + CommunityID: communityID, + From: from, + To: to, + }) +} + +func SendHistoryArchivesCreated(communityID string, from int, to int) { + send(EventHistoryArchivesCreated, HistoryArchivesCreatedSignal{ + CommunityID: communityID, + From: from, + To: to, + }) +} + +func SendHistoryArchivesSeeding(communityID string) { + send(EventHistoryArchivesSeeding, HistoryArchivesSeedingSignal{CommunityID: communityID}) +} + +func SendHistoryArchivesUnseeded(communityID string) { + send(EventHistoryArchivesUnseeded, HistoryArchivesUnseededSignal{CommunityID: communityID}) +}
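The mailserver-availability hook added in messenger_mailserver_cycle.go is a one-shot broadcast: Start() subscribes, and once the active mailserver comes up, EmitMailserverAvailable signals every subscriber exactly once, closes the channel, and empties the subscription list. Below is a minimal, self-contained sketch of the same pattern; the `availability` type and its mutex are illustrative additions, not status-go code.

```go
package main

import (
	"fmt"
	"sync"
)

// availability sketches the one-shot broadcast used by
// SubscribeMailserverAvailable/EmitMailserverAvailable: each subscriber
// receives exactly one value and the channel is then closed, so later
// receives return immediately.
type availability struct {
	mu   sync.Mutex
	subs []chan struct{}
}

func (a *availability) subscribe() chan struct{} {
	a.mu.Lock()
	defer a.mu.Unlock()
	c := make(chan struct{})
	a.subs = append(a.subs, c)
	return c
}

func (a *availability) emit() {
	a.mu.Lock()
	defer a.mu.Unlock()
	for _, c := range a.subs {
		c <- struct{}{} // deliver the one-shot notification
		close(c)        // subsequent receives no longer block
	}
	a.subs = nil // every subscription is consumed by a single emit
}

func main() {
	a := &availability{}
	done := make(chan struct{})

	available := a.subscribe()
	go func() {
		<-available // mirrors the goroutine in Messenger.Start
		fmt.Println("mailserver available, starting history archive tasks")
		close(done)
	}()

	a.emit()
	<-done
}
```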
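InitHistoryArchiveTasks decides between three cases based on the end date of the last archive: no archive exists yet, the last archive is younger than the seven-day interval, or it is older. The sketch below isolates that decision in a hypothetical `scheduleArchiveTasks` helper so the branches can be read without the messenger plumbing; it is illustrative only and not part of the patch.

```go
package main

import (
	"fmt"
	"time"
)

// messageArchiveInterval mirrors the 7-day partition used by the patch.
const messageArchiveInterval = 7 * 24 * time.Hour

// scheduleArchiveTasks reproduces the three-way decision made for each
// community:
//   - startLoop:   start the periodic archive-creation loop right away
//   - createAfter: wait this long, then create an archive, then start the loop
//   - createNow:   create and seed an archive for the backlog immediately
func scheduleArchiveTasks(lastArchiveEnd, now time.Time) (createNow bool, createAfter time.Duration, startLoop bool) {
	if lastArchiveEnd.IsZero() || lastArchiveEnd.Unix() == 0 {
		// No archive exists yet: nothing to catch up on, just run the loop.
		return false, 0, true
	}
	sinceLast := now.Sub(lastArchiveEnd)
	if sinceLast < messageArchiveInterval {
		// The current partition is still open: keep seeding the existing
		// archive and schedule creation for when the partition closes.
		return false, messageArchiveInterval - sinceLast, false
	}
	// The last archive is at least one interval old: archive the backlog now.
	return true, 0, true
}

func main() {
	now := time.Now()
	for _, last := range []time.Time{
		{},                            // never archived
		now.Add(-3 * 24 * time.Hour),  // archived 3 days ago
		now.Add(-10 * 24 * time.Hour), // archived 10 days ago
	} {
		createNow, after, loop := scheduleArchiveTasks(last, now)
		fmt.Printf("last=%v createNow=%v createAfter=%v startLoop=%v\n", last, createNow, after, loop)
	}
}
```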
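Archives are cut into `messageArchiveInterval`-sized partitions, and when no waku message has been stored for a community yet the routine falls back to requesting the last 30 days from the store nodes. A small sketch of that partitioning follows, assuming the manager splits the requested range into seven-day chunks; the `partitionRange` helper is hypothetical.

```go
package main

import (
	"fmt"
	"time"
)

const messageArchiveInterval = 7 * 24 * time.Hour

// partitionRange is an illustrative helper (the real partitioning lives in
// communities.Manager): it splits [from, to) into interval-sized chunks,
// the granularity at which CreateAndSeedHistoryArchive is invoked.
func partitionRange(from, to time.Time) [][2]time.Time {
	var partitions [][2]time.Time
	for start := from; start.Before(to); start = start.Add(messageArchiveInterval) {
		end := start.Add(messageArchiveInterval)
		if end.After(to) {
			end = to
		}
		partitions = append(partitions, [2]time.Time{start, end})
	}
	return partitions
}

func main() {
	// When no waku message has been seen yet, InitHistoryArchiveTasks falls
	// back to requesting the last 30 days from the store nodes.
	to := time.Now()
	from := to.AddDate(0, 0, -30)

	for i, p := range partitionRange(from, to) {
		fmt.Printf("partition %d: %s -> %s\n", i, p[0].Format(time.RFC3339), p[1].Format(time.RFC3339))
	}
}
```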
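The rename of syncFilters to syncFiltersFrom lets a caller pin the `from` timestamp for every topic instead of always capping it to the default sync period; that is what the archive routine relies on to backfill messages since the last stored waku message. A standalone sketch of the timestamp selection, with a hypothetical `resolveFrom` helper approximating the behaviour:

```go
package main

import (
	"fmt"
	"time"
)

// resolveFrom stands in for the topic bookkeeping inside syncFiltersFrom:
//   - lastRequest == 0: behave like the old syncFilters and cap the stored
//     LastRequest, roughly as capToDefaultSyncPeriod does.
//   - lastRequest != 0: trust the caller-provided timestamp and skip the cap.
func resolveFrom(storedLastRequest, lastRequest, defaultPeriodFromNow uint32) uint32 {
	if lastRequest == 0 {
		if storedLastRequest < defaultPeriodFromNow {
			return defaultPeriodFromNow // never reach further back than the default window
		}
		return storedLastRequest
	}
	return lastRequest // explicit override, no capping
}

func main() {
	now := uint32(time.Now().Unix())
	thirtyDaysAgo := now - 30*24*60*60
	defaultPeriodFromNow := now - 7*24*60*60 // assume a 7-day default sync window

	fmt.Println("regular sync starts from:", resolveFrom(thirtyDaysAgo, 0, defaultPeriodFromNow))
	fmt.Println("archive backfill starts from:", resolveFrom(thirtyDaysAgo, thirtyDaysAgo, defaultPeriodFromNow))
}
```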
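Seeding a new archive triggers dispatchMagnetlinkMessage, which wraps the torrent magnet URI in the new CommunityMessageArchiveMagnetlink protobuf and sends it with the COMMUNITY_ARCHIVE_MAGNETLINK message type. A minimal sketch of building that payload with the generated types; the clock and magnet values are placeholders, and the snippet assumes status-go is available as a module dependency.

```go
package main

import (
	"fmt"

	"github.com/golang/protobuf/proto"

	"github.com/status-im/status-go/protocol/protobuf"
)

// Build and marshal the magnetlink payload the way dispatchMagnetlinkMessage
// does, without the messenger plumbing around it.
func main() {
	msg := &protobuf.CommunityMessageArchiveMagnetlink{
		Clock:     1648000000000,              // normally m.getTimesource().GetCurrentTime()
		MagnetUri: "magnet:?xt=urn:btih:aaaa", // placeholder magnet link
	}

	encoded, err := proto.Marshal(msg)
	if err != nil {
		panic(err)
	}
	fmt.Printf("encoded %d bytes; sent on the community's magnetlink channel as COMMUNITY_ARCHIVE_MAGNETLINK\n", len(encoded))
}
```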
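EnableCommunityHistoryArchiveProtocol and DisableCommunityHistoryArchiveProtocol are exposed on services/ext's PublicAPI, so clients toggle the feature over JSON-RPC. A sketch of such a call follows, assuming the usual `wakuext_` method namespace and a node serving its HTTP RPC endpoint on localhost:8545; both of these are assumptions, not confirmed by the patch.

```go
package main

import (
	"bytes"
	"fmt"
	"io"
	"net/http"
)

// Toggle the community history archive protocol via JSON-RPC.
func main() {
	payload := []byte(`{"jsonrpc":"2.0","id":1,"method":"wakuext_enableCommunityHistoryArchiveProtocol","params":[]}`)

	resp, err := http.Post("http://localhost:8545", "application/json", bytes.NewReader(payload))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	body, _ := io.ReadAll(resp.Body)
	fmt.Println(string(body)) // expect a null result on success
}
```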
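The WakuMessageArchiveIndex maps archive identifiers to offset/size metadata, which suggests the torrent ships one serialized index plus a data blob that individual archives are sliced out of. Below is a hedged sketch of reading such an index with the generated types; the file names and the exact on-disk layout (for instance, whether `Size` includes the recorded padding) are assumptions, since the manager-side code is not part of this section.

```go
package main

import (
	"fmt"
	"os"

	"github.com/golang/protobuf/proto"

	"github.com/status-im/status-go/protocol/protobuf"
)

func main() {
	// Assumed layout: "index" holds a WakuMessageArchiveIndex, "data" holds
	// the concatenated WakuMessageArchive blobs referenced by its entries.
	indexBytes, err := os.ReadFile("index")
	if err != nil {
		panic(err)
	}
	dataBytes, err := os.ReadFile("data")
	if err != nil {
		panic(err)
	}

	index := &protobuf.WakuMessageArchiveIndex{}
	if err := proto.Unmarshal(indexBytes, index); err != nil {
		panic(err)
	}

	for id, meta := range index.Archives {
		// Each index entry records where its archive lives in the data file.
		raw := dataBytes[meta.Offset : meta.Offset+meta.Size]

		archive := &protobuf.WakuMessageArchive{}
		if err := proto.Unmarshal(raw, archive); err != nil {
			panic(err)
		}
		fmt.Printf("archive %s: %d messages, from=%d to=%d\n",
			id, len(archive.Messages), archive.Metadata.From, archive.Metadata.To)
	}
}
```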
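Each new signal carries a small JSON payload: a `communityId`, plus `from`/`to` values for the created and not-created events. A client-side sketch of decoding one of them, assuming status-go's usual `{"type": ..., "event": ...}` signal envelope; the payload fields mirror HistoryArchivesCreatedSignal in signal/events_community_archives.go.

```go
package main

import (
	"encoding/json"
	"fmt"
)

type envelope struct {
	Type  string          `json:"type"`
	Event json.RawMessage `json:"event"`
}

type historyArchivesCreated struct {
	CommunityID string `json:"communityId"`
	From        int    `json:"from"`
	To          int    `json:"to"`
}

func main() {
	// Example envelope as a client might receive it (values made up).
	raw := []byte(`{"type":"community.historyArchivesCreated","event":{"communityId":"0xabc","from":1646000000,"to":1648000000}}`)

	var env envelope
	if err := json.Unmarshal(raw, &env); err != nil {
		panic(err)
	}
	if env.Type != "community.historyArchivesCreated" {
		return // not the event we are illustrating
	}

	var ev historyArchivesCreated
	if err := json.Unmarshal(env.Event, &ev); err != nil {
		panic(err)
	}
	fmt.Printf("community %s archived messages from %d to %d\n", ev.CommunityID, ev.From, ev.To)
}
```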