From 5881a6dc2fcfa1c7b933957a46a72d0ac85b195c Mon Sep 17 00:00:00 2001 From: Pascal Precht <445106+PascalPrecht@users.noreply.github.com> Date: Mon, 21 Mar 2022 15:18:36 +0100 Subject: [PATCH] Introduce community history archive routine This introduces logic needed to: - Create WakuMessageArchives and and indices from store waku messages - History archive torrent data to disk and create .torrent file from that - Seed and unseed history archive torrents as necessary - Starting/stopping the torrent client - Enabling/disabling community history support for individual components and starting/stopping the routine intervals accordingly This does not yet handle magnet links (#2568) Closes #2567 --- appdatabase/migrations/bindata.go | 46 +- ...9_add_community_archives_info_table.up.sql | 7 + eth-node/types/topic.go | 17 + multiaccounts/accounts/database.go | 4 + protocol/communities/manager.go | 616 +++++++++++++++++- protocol/communities/manager_test.go | 421 +++++++++++- protocol/communities/persistence.go | 120 ++++ protocol/messenger.go | 25 +- protocol/messenger_communities.go | 251 +++++++ protocol/messenger_mailserver.go | 23 +- protocol/messenger_mailserver_cycle.go | 16 + .../application_metadata_message.pb.go | 95 +-- .../application_metadata_message.proto | 1 + protocol/protobuf/communities.pb.go | 490 ++++++++++++-- protocol/protobuf/communities.proto | 39 ++ services/ext/api.go | 8 + 16 files changed, 2030 insertions(+), 149 deletions(-) create mode 100644 appdatabase/migrations/sql/1647871329_add_community_archives_info_table.up.sql diff --git a/appdatabase/migrations/bindata.go b/appdatabase/migrations/bindata.go index d9a6b707540..5f9f6d0c5ef 100644 --- a/appdatabase/migrations/bindata.go +++ b/appdatabase/migrations/bindata.go @@ -10,6 +10,7 @@ // 1647860168_add_torrent_config.up.sql (211B) // 1647862837_add_communities_settings_table.up.sql (206B) // 1647868109_add_waku_messages_table.up.sql (186B) +// 1647871329_add_community_archives_info_table.up.sql (195B) // doc.go (74B) package migrations @@ -274,11 +275,31 @@ func _1647868109_add_waku_messages_tableUpSql() (*asset, error) { return nil, err } - info := bindataFileInfo{name: "1647868109_add_waku_messages_table.up.sql", size: 186, mode: os.FileMode(0664), modTime: time.Unix(1647868130, 0)} + info := bindataFileInfo{name: "1647868109_add_waku_messages_table.up.sql", size: 186, mode: os.FileMode(0664), modTime: time.Unix(1647869823, 0)} a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x31, 0x93, 0xe0, 0x8e, 0x8a, 0x28, 0x6e, 0xf5, 0xc9, 0x7c, 0xd0, 0x97, 0xe0, 0xd4, 0x67, 0xe9, 0x2b, 0xbf, 0x87, 0xb7, 0x86, 0xc7, 0xa, 0x5f, 0xe2, 0x83, 0xe1, 0x13, 0x92, 0x32, 0xd2, 0xa3}} return a, nil } +var __1647871329_add_community_archives_info_tableUpSql = []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x74\x8d\xc1\x6a\xc4\x20\x18\x06\xef\x3e\xc5\x77\x6c\xa1\x87\xbe\x82\xb5\x7f\x20\xd4\x9a\x20\x7f\xa0\x39\x89\x18\x9b\x4a\xa2\x81\xea\x2e\xec\xdb\x2f\x7b\xc9\x6d\xcf\x33\xc3\x28\x4b\x92\x09\x2c\x3f\x34\x21\x1c\x39\x5f\x4a\x6a\x29\x56\xe7\xff\xc3\x5f\xba\x46\x97\xca\xef\x81\x17\x81\x13\xde\x5c\x5a\xc0\xf4\xc3\x18\x6d\xff\x2d\xed\x8c\x2f\x9a\x31\x18\xa8\xc1\x74\xba\x57\x0c\x4b\xa3\x96\x8a\xde\x04\x90\xfd\x5a\x62\xdb\x53\xd9\x5c\xd8\x8f\xb0\xa1\x37\x0c\x33\x30\xcc\xa4\x35\x3e\xa9\x93\x93\x66\xbc\x3f\xd4\xdd\xd7\xe6\x72\xac\xd5\xaf\xf1\xbc\xc7\xb2\xb8\xc5\xb7\xf8\xa4\x13\xaf\x42\x88\x7b\x00\x00\x00\xff\xff\xa8\x4b\xee\x77\xc3\x00\x00\x00") + +func _1647871329_add_community_archives_info_tableUpSqlBytes() ([]byte, error) { + return 
bindataRead( + __1647871329_add_community_archives_info_tableUpSql, + "1647871329_add_community_archives_info_table.up.sql", + ) +} + +func _1647871329_add_community_archives_info_tableUpSql() (*asset, error) { + bytes, err := _1647871329_add_community_archives_info_tableUpSqlBytes() + if err != nil { + return nil, err + } + + info := bindataFileInfo{name: "1647871329_add_community_archives_info_table.up.sql", size: 195, mode: os.FileMode(0664), modTime: time.Unix(1647871372, 0)} + a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xfd, 0x3d, 0x27, 0x93, 0x55, 0x14, 0x82, 0xe1, 0x23, 0x2a, 0xd, 0x22, 0xbb, 0x32, 0xe7, 0xf6, 0xd, 0xf7, 0x7a, 0x38, 0x54, 0x91, 0xe2, 0x97, 0xd2, 0xe, 0x3a, 0xc9, 0xb, 0x6a, 0xd9, 0x1a}} + return a, nil +} + var _docGo = []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x2c\xc9\xb1\x0d\xc4\x20\x0c\x05\xd0\x9e\x29\xfe\x02\xd8\xfd\x6d\xe3\x4b\xac\x2f\x44\x82\x09\x78\x7f\xa5\x49\xfd\xa6\x1d\xdd\xe8\xd8\xcf\x55\x8a\x2a\xe3\x47\x1f\xbe\x2c\x1d\x8c\xfa\x6f\xe3\xb4\x34\xd4\xd9\x89\xbb\x71\x59\xb6\x18\x1b\x35\x20\xa2\x9f\x0a\x03\xa2\xe5\x0d\x00\x00\xff\xff\x60\xcd\x06\xbe\x4a\x00\x00\x00") func docGoBytes() ([]byte, error) { @@ -410,6 +431,8 @@ var _bindata = map[string]func() (*asset, error){ "1647868109_add_waku_messages_table.up.sql": _1647868109_add_waku_messages_tableUpSql, + "1647871329_add_community_archives_info_table.up.sql": _1647871329_add_community_archives_info_tableUpSql, + "doc.go": docGo, } @@ -454,16 +477,17 @@ type bintree struct { } var _bintree = &bintree{nil, map[string]*bintree{ - "1640111208_dummy.up.sql": &bintree{_1640111208_dummyUpSql, map[string]*bintree{}}, - "1642666031_add_removed_clock_to_bookmarks.up.sql": &bintree{_1642666031_add_removed_clock_to_bookmarksUpSql, map[string]*bintree{}}, - "1643644541_gif_api_key_setting.up.sql": &bintree{_1643644541_gif_api_key_settingUpSql, map[string]*bintree{}}, - "1644188994_recent_stickers.up.sql": &bintree{_1644188994_recent_stickersUpSql, map[string]*bintree{}}, - "1646659233_add_address_to_dapp_permisssion.up.sql": &bintree{_1646659233_add_address_to_dapp_permisssionUpSql, map[string]*bintree{}}, - "1646841105_add_emoji_account.up.sql": &bintree{_1646841105_add_emoji_accountUpSql, map[string]*bintree{}}, - "1647278782_display_name.up.sql": &bintree{_1647278782_display_nameUpSql, map[string]*bintree{}}, - "1647860168_add_torrent_config.up.sql": &bintree{_1647860168_add_torrent_configUpSql, map[string]*bintree{}}, - "1647862837_add_communities_settings_table.up.sql": &bintree{_1647862837_add_communities_settings_tableUpSql, map[string]*bintree{}}, - "1647868109_add_waku_messages_table.up.sql": &bintree{_1647868109_add_waku_messages_tableUpSql, map[string]*bintree{}}, + "1640111208_dummy.up.sql": &bintree{_1640111208_dummyUpSql, map[string]*bintree{}}, + "1642666031_add_removed_clock_to_bookmarks.up.sql": &bintree{_1642666031_add_removed_clock_to_bookmarksUpSql, map[string]*bintree{}}, + "1643644541_gif_api_key_setting.up.sql": &bintree{_1643644541_gif_api_key_settingUpSql, map[string]*bintree{}}, + "1644188994_recent_stickers.up.sql": &bintree{_1644188994_recent_stickersUpSql, map[string]*bintree{}}, + "1646659233_add_address_to_dapp_permisssion.up.sql": &bintree{_1646659233_add_address_to_dapp_permisssionUpSql, map[string]*bintree{}}, + "1646841105_add_emoji_account.up.sql": &bintree{_1646841105_add_emoji_accountUpSql, map[string]*bintree{}}, + "1647278782_display_name.up.sql": &bintree{_1647278782_display_nameUpSql, map[string]*bintree{}}, + "1647860168_add_torrent_config.up.sql": 
&bintree{_1647860168_add_torrent_configUpSql, map[string]*bintree{}}, + "1647862837_add_communities_settings_table.up.sql": &bintree{_1647862837_add_communities_settings_tableUpSql, map[string]*bintree{}}, + "1647868109_add_waku_messages_table.up.sql": &bintree{_1647868109_add_waku_messages_tableUpSql, map[string]*bintree{}}, + "1647871329_add_community_archives_info_table.up.sql": &bintree{_1647871329_add_community_archives_info_tableUpSql, map[string]*bintree{}}, "doc.go": &bintree{docGo, map[string]*bintree{}}, }} diff --git a/appdatabase/migrations/sql/1647871329_add_community_archives_info_table.up.sql b/appdatabase/migrations/sql/1647871329_add_community_archives_info_table.up.sql new file mode 100644 index 00000000000..738321b82f3 --- /dev/null +++ b/appdatabase/migrations/sql/1647871329_add_community_archives_info_table.up.sql @@ -0,0 +1,7 @@ +CREATE TABLE communities_archive_info ( + community_id TEXT PRIMARY KEY ON CONFLICT REPLACE, + magnetlink_clock INT NOT NULL DEFAULT 0, + last_message_archive_end_date INT NOT NULL DEFAULT 0 +) + + diff --git a/eth-node/types/topic.go b/eth-node/types/topic.go index 0b1cbed9301..218fc443c38 100644 --- a/eth-node/types/topic.go +++ b/eth-node/types/topic.go @@ -1,5 +1,9 @@ package types +import ( + "github.com/ethereum/go-ethereum/common/hexutil" +) + const ( // TopicLength is the expected length of the topic, in bytes TopicLength = 4 @@ -84,3 +88,16 @@ func MakeFullNodeBloom() []byte { } return bloom } + +func StringToTopic(s string) (t TopicType) { + str, _ := hexutil.Decode(s) + return BytesToTopic(str) +} + +func TopicTypeToByteArray(t TopicType) []byte { + topic := make([]byte, 4) + for i, b := range t { + topic[i] = b + } + return topic +} diff --git a/multiaccounts/accounts/database.go b/multiaccounts/accounts/database.go index 87c5a39d712..bad3b93ea98 100644 --- a/multiaccounts/accounts/database.go +++ b/multiaccounts/accounts/database.go @@ -977,3 +977,7 @@ func (db *Database) GifFavorites() (favorites json.RawMessage, err error) { } return favorites, nil } + +func (db *Database) GetNodeConfig() (*params.NodeConfig, error) { + return nodecfg.GetNodeConfig(db.db) +} diff --git a/protocol/communities/manager.go b/protocol/communities/manager.go index 63193bf7401..d55ed4adb52 100644 --- a/protocol/communities/manager.go +++ b/protocol/communities/manager.go @@ -4,12 +4,15 @@ import ( "crypto/ecdsa" "database/sql" "fmt" + "log" + "os" "strings" + "sync" "time" - _ "github.com/anacrolix/torrent" - _ "github.com/anacrolix/torrent/bencode" - _ "github.com/anacrolix/torrent/metainfo" + "github.com/anacrolix/torrent" + "github.com/anacrolix/torrent/bencode" + "github.com/anacrolix/torrent/metainfo" "github.com/golang/protobuf/proto" "github.com/google/uuid" @@ -18,23 +21,37 @@ import ( "github.com/status-im/status-go/eth-node/crypto" "github.com/status-im/status-go/eth-node/types" + "github.com/status-im/status-go/params" "github.com/status-im/status-go/protocol/common" "github.com/status-im/status-go/protocol/ens" "github.com/status-im/status-go/protocol/protobuf" "github.com/status-im/status-go/protocol/requests" + "github.com/status-im/status-go/protocol/transport" ) +var defaultAnnounceList = [][]string{ + {"udp://tracker.opentrackr.org:1337/announce"}, + {"udp://tracker.openbittorrent.com:6969/announce"}, +} +var pieceLength = 1024 + type Manager struct { - persistence *Persistence - ensSubscription chan []*ens.VerificationRecord - subscriptions []chan *Subscription - ensVerifier *ens.Verifier - identity *ecdsa.PublicKey - logger 
*zap.Logger - quit chan struct{} + persistence *Persistence + ensSubscription chan []*ens.VerificationRecord + subscriptions []chan *Subscription + ensVerifier *ens.Verifier + identity *ecdsa.PublicKey + logger *zap.Logger + transport *transport.Transport + quit chan struct{} + torrentConfig *params.TorrentConfig + torrentClient *torrent.Client + historyArchiveTasksWaitGroup sync.WaitGroup + historyArchiveTasks map[string]chan struct{} + torrentTasks map[string]metainfo.Hash } -func NewManager(identity *ecdsa.PublicKey, db *sql.DB, logger *zap.Logger, verifier *ens.Verifier) (*Manager, error) { +func NewManager(identity *ecdsa.PublicKey, db *sql.DB, logger *zap.Logger, verifier *ens.Verifier, transport *transport.Transport, torrentConfig *params.TorrentConfig) (*Manager, error) { if identity == nil { return nil, errors.New("empty identity") } @@ -47,9 +64,13 @@ func NewManager(identity *ecdsa.PublicKey, db *sql.DB, logger *zap.Logger, verif } manager := &Manager{ - logger: logger, - identity: identity, - quit: make(chan struct{}), + logger: logger, + identity: identity, + quit: make(chan struct{}), + transport: transport, + torrentConfig: torrentConfig, + historyArchiveTasks: make(map[string]chan struct{}), + torrentTasks: make(map[string]metainfo.Hash), persistence: &Persistence{ logger: logger, db: db, @@ -86,6 +107,12 @@ func (m *Manager) Start() error { if m.ensVerifier != nil { m.runENSVerificationLoop() } + + if m.torrentConfig != nil && m.torrentConfig.Enabled { + err := m.StartTorrentClient() + return err + } + return nil } @@ -113,9 +140,60 @@ func (m *Manager) Stop() error { for _, c := range m.subscriptions { close(c) } + m.StopTorrentClient() + return nil +} + +func (m *Manager) SetTorrentConfig(config *params.TorrentConfig) { + m.torrentConfig = config +} + +func (m *Manager) StartTorrentClient() error { + if m.TorrentClientStarted() { + return nil + } + + config := torrent.NewDefaultClientConfig() + config.SetListenAddr(":" + fmt.Sprint(m.torrentConfig.Port)) + config.Seed = true + + config.DataDir = m.torrentConfig.DataDir + + if _, err := os.Stat(m.torrentConfig.DataDir); os.IsNotExist(err) { + err := os.MkdirAll(m.torrentConfig.DataDir, 0700) + if err != nil { + return err + } + } + + log.Println("Starting torrent client at port: ", m.torrentConfig.Port) + // Instantiating the client will make it bootstrap and listen eagerly, + // so no go routine is needed here + client, err := torrent.NewClient(config) + if err != nil { + return err + } + m.torrentClient = client return nil } +func (m *Manager) StopTorrentClient() []error { + if m.TorrentClientStarted() { + m.StopHistoryArchiveTasksIntervals() + log.Println("Stopping torrent client") + errs := m.torrentClient.Close() + if len(errs) > 0 { + return errs + } + m.torrentClient = nil + } + return make([]error, 0) +} + +func (m *Manager) TorrentClientStarted() bool { + return m.torrentClient != nil +} + func (m *Manager) publish(subscription *Subscription) { for _, s := range m.subscriptions { select { @@ -721,6 +799,10 @@ func (m *Manager) JoinCommunity(id types.HexBytes) (*Community, error) { return community, nil } +func (m *Manager) UpdateMagnetlinkMessageClock(communityID types.HexBytes, clock uint64) error { + return m.persistence.UpdateMagnetlinkMessageClock(communityID, clock) +} + func (m *Manager) LeaveCommunity(id types.HexBytes) (*Community, error) { community, err := m.GetByID(id) if err != nil { @@ -961,6 +1043,512 @@ func (m *Manager) GetAdminCommunitiesChatIDs() (map[string]bool, error) { return chatIDs, nil } 
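For orientation, the new torrent-related surface of Manager is exercised roughly as follows. This is a minimal sketch based on the test setup further below in this patch; the wrapper function name, the caller-provided db and logger, and the TorrentConfig values are placeholders, and the nil arguments stand in for the ens.Verifier and transport wiring shown in messenger.go.

// Sketch only; assumes the packages referenced elsewhere in this patch.
package example

import (
	"crypto/ecdsa"
	"database/sql"
	"os"

	"go.uber.org/zap"

	"github.com/status-im/status-go/params"
	"github.com/status-im/status-go/protocol/communities"
)

// startArchiveSeeding is a hypothetical wrapper showing the lifecycle:
// configure torrents, build the Manager, and start it.
func startArchiveSeeding(key *ecdsa.PrivateKey, db *sql.DB, logger *zap.Logger) (*communities.Manager, error) {
	torrentConfig := &params.TorrentConfig{
		Enabled:    true,
		DataDir:    os.TempDir() + "/archivedata",
		TorrentDir: os.TempDir() + "/torrents",
		Port:       9999,
	}

	// NewManager now also takes a transport and a torrent config; both may be
	// nil when history archive support is not used (see manager_test.go below).
	m, err := communities.NewManager(&key.PublicKey, db, logger, nil, nil, torrentConfig)
	if err != nil {
		return nil, err
	}

	// Start() boots the torrent client because torrentConfig.Enabled is true;
	// m.Stop() (or m.StopTorrentClient()) tears it down again.
	if err := m.Start(); err != nil {
		return nil, err
	}

	// Archives are then created and seeded per community, e.g. from the
	// periodic task interval:
	//   m.CreateAndSeedHistoryArchive(communityID, topics, startDate, endDate, partition)
	return m, nil
}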
+func (m *Manager) GetCommunityChatsFilters(communityID types.HexBytes) ([]*transport.Filter, error) { + chatIDs, err := m.persistence.GetCommunityChatIDs(communityID) + if err != nil { + return nil, err + } + + filters := []*transport.Filter{} + for _, cid := range chatIDs { + filters = append(filters, m.transport.FilterByChatID(cid)) + } + return filters, nil +} + +func (m *Manager) GetCommunityChatsTopics(communityID types.HexBytes) ([]types.TopicType, error) { + filters, err := m.GetCommunityChatsFilters(communityID) + if err != nil { + return nil, err + } + + topics := []types.TopicType{} + for _, filter := range filters { + topics = append(topics, filter.Topic) + } + + return topics, nil +} + func (m *Manager) StoreWakuMessage(message *types.Message) error { return m.persistence.SaveWakuMessage(message) } + +func (m *Manager) GetLatestWakuMessageTimestamp(topics []types.TopicType) (uint64, error) { + return m.persistence.GetLatestWakuMessageTimestamp(topics) +} + +func (m *Manager) GetOldestWakuMessageTimestamp(topics []types.TopicType) (uint64, error) { + return m.persistence.GetOldestWakuMessageTimestamp(topics) +} + +func (m *Manager) GetLastMessageArchiveEndDate(communityID types.HexBytes) (uint64, error) { + return m.persistence.GetLastMessageArchiveEndDate(communityID) +} + +func (m *Manager) GetHistoryArchivePartitionStartTimestamp(communityID types.HexBytes) (uint64, error) { + filters, err := m.GetCommunityChatsFilters(communityID) + if err != nil { + m.logger.Warn("failed to get community chats filters", zap.Error(err)) + return 0, err + } + + if len(filters) == 0 { + // If we don't have chat filters, we likely don't have any chats + // associated to this community, which means there's nothing more + // to do here + return 0, nil + } + + topics := []types.TopicType{} + + for _, filter := range filters { + topics = append(topics, filter.Topic) + } + + lastArchiveEndDateTimestamp, err := m.GetLastMessageArchiveEndDate(communityID) + if err != nil { + m.logger.Debug("failed to get last archive end date", zap.Error(err)) + return 0, err + } + + if lastArchiveEndDateTimestamp == 0 { + // If we don't have a tracked last message archive end date, it + // means we haven't created an archive before, which means + // the next thing to look at is the oldest waku message timestamp for + // this community + lastArchiveEndDateTimestamp, err = m.GetOldestWakuMessageTimestamp(topics) + if err != nil { + m.logger.Warn("failed to get oldest waku message timestamp", zap.Error(err)) + return 0, err + } + if lastArchiveEndDateTimestamp == 0 { + // This means there's no waku message stored for this community so far + // (even after requesting possibly missed messages), so no messages exist yet that can be archived + return 0, nil + } + } + + return lastArchiveEndDateTimestamp, nil +} + +func (m *Manager) CreateAndSeedHistoryArchive(communityID types.HexBytes, topics []types.TopicType, startDate time.Time, endDate time.Time, partition time.Duration) error { + m.UnseedHistoryArchiveTorrent(communityID) + err := m.CreateHistoryArchiveTorrent(communityID, topics, startDate, endDate, partition) + if err != nil { + return err + } + return m.SeedHistoryArchiveTorrent(communityID) +} + +func (m *Manager) StartHistoryArchiveTasksInterval(community *Community, interval time.Duration, dispatchMagnetlink func(types.HexBytes)) { + id := community.IDString() + _, exists := m.historyArchiveTasks[id] + + if exists { + log.Println("History archive tasks interval already runs for community: ", id) + 
m.logger.Debug("History archive tasks interval already runs for community: ", zap.Any("id", id)) + return + } + + cancel := make(chan struct{}) + m.historyArchiveTasks[id] = cancel + m.historyArchiveTasksWaitGroup.Add(1) + + ticker := time.NewTicker(interval) + defer ticker.Stop() + + log.Println("Starting history archive tasks interval for community: ", id) + m.logger.Debug("Starting history archive tasks interval for community: ", zap.Any("id", id)) + for { + select { + case <-ticker.C: + log.Println("Executing history archive tasks for community: ", id) + lastArchiveEndDateTimestamp, err := m.GetHistoryArchivePartitionStartTimestamp(community.ID()) + if err != nil { + m.logger.Debug("failed to get last archive end date", zap.Error(err)) + continue + } + + if lastArchiveEndDateTimestamp == 0 { + // This means there are no waku messages for this community, + // so nothing to do here + continue + } + + topics, err := m.GetCommunityChatsTopics(community.ID()) + if err != nil { + m.logger.Debug("failed to get community chats topics", zap.Error(err)) + continue + } + + to := time.Unix(time.Now().Unix(), 0) + lastArchiveEndDate := time.Unix(int64(lastArchiveEndDateTimestamp), 0) + + err = m.CreateAndSeedHistoryArchive(community.ID(), topics, lastArchiveEndDate, to, interval) + if err != nil { + m.logger.Debug("failed to create and seed history archive", zap.Error(err)) + continue + } + dispatchMagnetlink(community.ID()) + case <-cancel: + m.UnseedHistoryArchiveTorrent(community.ID()) + delete(m.historyArchiveTasks, id) + m.historyArchiveTasksWaitGroup.Done() + return + } + } +} + +func (m *Manager) StopHistoryArchiveTasksIntervals() { + for _, t := range m.historyArchiveTasks { + close(t) + } + // Stoping archive interval tasks is async, so we need + // to wait for all of them to be closed before we shutdown + // the torrent client + m.historyArchiveTasksWaitGroup.Wait() +} + +func (m *Manager) StopHistoryArchiveTasksInterval(communityID types.HexBytes) { + task, ok := m.historyArchiveTasks[communityID.String()] + if ok { + log.Println("Stopping history archive tasks interval for community: ", communityID.String()) + close(task) + } +} + +type EncodedArchiveData struct { + padding int + bytes []byte +} + +func (m *Manager) CreateHistoryArchiveTorrent(communityID types.HexBytes, topics []types.TopicType, startDate time.Time, endDate time.Time, partition time.Duration) error { + + from := startDate + to := from.Add(partition) + if to.After(endDate) { + to = endDate + } + + archiveDir := m.torrentConfig.DataDir + "/" + communityID.String() + torrentDir := m.torrentConfig.TorrentDir + indexPath := archiveDir + "/index" + dataPath := archiveDir + "/data" + + wakuMessageArchiveIndexProto := &protobuf.WakuMessageArchiveIndex{} + wakuMessageArchiveIndex := make(map[string]*protobuf.WakuMessageArchiveIndexMetadata) + + if _, err := os.Stat(archiveDir); os.IsNotExist(err) { + err := os.MkdirAll(archiveDir, 0700) + if err != nil { + return err + } + } + if _, err := os.Stat(torrentDir); os.IsNotExist(err) { + err := os.MkdirAll(torrentDir, 0700) + if err != nil { + return err + } + } + + _, err := os.Stat(indexPath) + if err == nil { + wakuMessageArchiveIndexProto, err = m.loadHistoryArchiveIndexFromFile(communityID) + if err != nil { + return err + } + } + + var offset uint64 = 0 + + for hash, metadata := range wakuMessageArchiveIndexProto.Archives { + offset = offset + metadata.Size + wakuMessageArchiveIndex[hash] = metadata + } + + var encodedArchives []*EncodedArchiveData + topicsAsByteArrays := 
topicsAsByteArrays(topics) + + log.Println("Creating archives for start date: ", startDate, " till: ", endDate, " partitioned by ", partition) + for { + if from.Equal(endDate) || from.After(endDate) { + break + } + log.Println("Creating message archive for partition (", partition, "). From: ", from, " To: ", to) + messages, err := m.persistence.GetWakuMessagesByFilterTopic(topics, uint64(from.Unix()), uint64(to.Unix())) + if err != nil { + return err + } + + if len(messages) == 0 { + // No need to create an archive with zero messages + log.Println("No messages in this partition") + from = to + to = to.Add(partition) + if to.After(endDate) { + to = endDate + } + continue + } + + wakuMessageArchive := m.createWakuMessageArchive(from, to, messages, topicsAsByteArrays) + encodedArchive, err := proto.Marshal(wakuMessageArchive) + if err != nil { + return err + } + + rawSize := len(encodedArchive) + padding := 0 + size := 0 + + if rawSize > pieceLength { + size = rawSize + pieceLength - (rawSize % pieceLength) + padding = size - rawSize + } else { + padding = pieceLength - rawSize + size = rawSize + padding + } + + wakuMessageArchiveIndexMetadata := &protobuf.WakuMessageArchiveIndexMetadata{ + Metadata: wakuMessageArchive.Metadata, + Offset: offset, + Size: uint64(size), + Padding: uint64(padding), + } + + wakuMessageArchiveIndexMetadataBytes, err := proto.Marshal(wakuMessageArchiveIndexMetadata) + if err != nil { + return err + } + + wakuMessageArchiveIndex[crypto.Keccak256Hash(wakuMessageArchiveIndexMetadataBytes).String()] = wakuMessageArchiveIndexMetadata + encodedArchives = append(encodedArchives, &EncodedArchiveData{bytes: encodedArchive, padding: padding}) + from = to + to = to.Add(partition) + if to.After(endDate) { + to = endDate + } + offset = offset + uint64(rawSize) + uint64(padding) + } + + if len(encodedArchives) > 0 { + + dataBytes := make([]byte, 0) + if _, err := os.Stat(dataPath); err == nil { + dataBytes, err = os.ReadFile(dataPath) + if err != nil { + return err + } + } + + for _, encodedArchiveData := range encodedArchives { + dataBytes = append(dataBytes, encodedArchiveData.bytes...) + dataBytes = append(dataBytes, make([]byte, encodedArchiveData.padding)...) 
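+			// The zero padding keeps every archive in the data file aligned to a
+			// multiple of pieceLength (1024 bytes), which is also the PieceLength
+			// of the torrent info created below, so each archive starts on a piece
+			// boundary. For example, a 1500 byte encoded archive gets 548 padding
+			// bytes and the next archive starts at offset 2048.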
+ } + + wakuMessageArchiveIndexProto.Archives = wakuMessageArchiveIndex + indexBytes, err := proto.Marshal(wakuMessageArchiveIndexProto) + if err != nil { + return err + } + + err = os.WriteFile(indexPath, indexBytes, 0644) + if err != nil { + return err + } + + err = os.WriteFile(dataPath, dataBytes, 0644) + if err != nil { + return err + } + + metaInfo := metainfo.MetaInfo{ + AnnounceList: defaultAnnounceList, + } + metaInfo.SetDefaults() + metaInfo.CreatedBy = common.PubkeyToHex(m.identity) + + info := metainfo.Info{ + PieceLength: int64(pieceLength), + } + + err = info.BuildFromFilePath(archiveDir) + if err != nil { + return err + } + + metaInfo.InfoBytes, err = bencode.Marshal(info) + if err != nil { + return err + } + + metaInfoBytes, err := bencode.Marshal(metaInfo) + if err != nil { + return err + } + + err = os.WriteFile(m.torrentFile(communityID.String()), metaInfoBytes, 0644) + if err != nil { + return err + } + } else { + log.Println("No archives created") + } + + lastMessageArchiveEndDate, err := m.persistence.GetLastMessageArchiveEndDate(communityID) + if err != nil { + return err + } + + if lastMessageArchiveEndDate > 0 { + err = m.persistence.UpdateLastMessageArchiveEndDate(communityID, uint64(from.Unix())) + } else { + err = m.persistence.SaveLastMessageArchiveEndDate(communityID, uint64(from.Unix())) + } + if err != nil { + return err + } + + log.Println("History archive created/updated for community: ", communityID.String()) + return nil +} + +func (m *Manager) SeedHistoryArchiveTorrent(communityID types.HexBytes) error { + m.UnseedHistoryArchiveTorrent(communityID) + + id := communityID.String() + torrentFile := m.torrentFile(id) + + metaInfo, err := metainfo.LoadFromFile(torrentFile) + if err != nil { + return err + } + + info, err := metaInfo.UnmarshalInfo() + if err != nil { + return err + } + + hash := metaInfo.HashInfoBytes() + m.torrentTasks[id] = hash + + if err != nil { + return err + } + + torrent, err := m.torrentClient.AddTorrent(metaInfo) + if err != nil { + return err + } + torrent.DownloadAll() + + log.Println("Seeding torrent for community: ", id) + log.Println("Magnetlink: ", metaInfo.Magnet(nil, &info).String()) + return nil +} + +func (m *Manager) UnseedHistoryArchiveTorrent(communityID types.HexBytes) { + id := communityID.String() + hash, exists := m.torrentTasks[id] + + if exists { + torrent, ok := m.torrentClient.Torrent(hash) + if ok { + log.Println("Unseeding and dropping torrent for community: ", id) + m.logger.Debug("Unseeding and dropping torrent for community: ", zap.Any("id", id)) + torrent.Drop() + delete(m.torrentTasks, id) + } + } +} + +func (m *Manager) IsSeedingHistoryArchiveTorrent(communityID types.HexBytes) bool { + id := communityID.String() + hash, _ := m.torrentTasks[id] + torrent, ok := m.torrentClient.Torrent(hash) + return ok && torrent.Seeding() +} + +func (m *Manager) GetHistoryArchiveMagnetlink(communityID types.HexBytes) (string, error) { + id := communityID.String() + torrentFile := m.torrentFile(id) + + metaInfo, err := metainfo.LoadFromFile(torrentFile) + if err != nil { + return "", err + } + + info, err := metaInfo.UnmarshalInfo() + if err != nil { + return "", err + } + + return metaInfo.Magnet(nil, &info).String(), nil +} + +func (m *Manager) createWakuMessageArchive(from time.Time, to time.Time, messages []types.Message, topics [][]byte) *protobuf.WakuMessageArchive { + var wakuMessages []*protobuf.WakuMessage + + for _, msg := range messages { + topic := types.TopicTypeToByteArray(msg.Topic) + wakuMessage := 
&protobuf.WakuMessage{ + Sig: msg.Sig, + Timestamp: uint64(msg.Timestamp), + Topic: topic, + Payload: msg.Payload, + Padding: msg.Padding, + Hash: msg.Hash, + } + wakuMessages = append(wakuMessages, wakuMessage) + } + + metadata := protobuf.WakuMessageArchiveMetadata{ + From: uint64(from.Unix()), + To: uint64(to.Unix()), + ContentTopic: topics, + } + + wakuMessageArchive := &protobuf.WakuMessageArchive{ + Metadata: &metadata, + Messages: wakuMessages, + } + return wakuMessageArchive +} + +func (m *Manager) loadHistoryArchiveIndexFromFile(communityID types.HexBytes) (*protobuf.WakuMessageArchiveIndex, error) { + wakuMessageArchiveIndexProto := &protobuf.WakuMessageArchiveIndex{} + + indexPath := m.archiveIndexFile(communityID.String()) + indexData, err := os.ReadFile(indexPath) + if err != nil { + return nil, err + } + + err = proto.Unmarshal(indexData, wakuMessageArchiveIndexProto) + if err != nil { + return nil, err + } + return wakuMessageArchiveIndexProto, nil +} + +func (m *Manager) torrentFile(communityID string) string { + return m.torrentConfig.TorrentDir + "/" + communityID + ".torrent" +} + +func (m *Manager) archiveIndexFile(communityID string) string { + return m.torrentConfig.DataDir + "/" + communityID + "/index" +} + +func (m *Manager) archiveDataFile(communityID string) string { + return m.torrentConfig.DataDir + "/" + communityID + "/data" +} + +func topicsAsByteArrays(topics []types.TopicType) [][]byte { + var topicsAsByteArrays [][]byte + for _, t := range topics { + topic := types.TopicTypeToByteArray(t) + topicsAsByteArrays = append(topicsAsByteArrays, topic) + } + return topicsAsByteArrays +} diff --git a/protocol/communities/manager_test.go b/protocol/communities/manager_test.go index 92aec790de3..1aaa50bae79 100644 --- a/protocol/communities/manager_test.go +++ b/protocol/communities/manager_test.go @@ -2,9 +2,16 @@ package communities import ( "bytes" + "io/ioutil" + "os" "testing" + "time" + "github.com/status-im/status-go/appdatabase" + "github.com/status-im/status-go/eth-node/types" + "github.com/status-im/status-go/params" "github.com/status-im/status-go/protocol/requests" + "github.com/status-im/status-go/protocol/transport" "github.com/golang/protobuf/proto" _ "github.com/mutecomm/go-sqlcipher" // require go-sqlcipher that overrides default implementation @@ -26,12 +33,17 @@ type ManagerSuite struct { } func (s *ManagerSuite) SetupTest() { - db, err := sqlite.OpenInMemory() - s.Require().NoError(err) + dbPath, err := ioutil.TempFile("", "") + s.NoError(err, "creating temp file for db") + db, err := appdatabase.InitializeDB(dbPath.Name(), "") + s.NoError(err, "creating sqlite db instance") + err = sqlite.Migrate(db) + s.NoError(err, "protocol migrate") + key, err := crypto.GenerateKey() s.Require().NoError(err) s.Require().NoError(err) - m, err := NewManager(&key.PublicKey, db, nil, nil) + m, err := NewManager(&key.PublicKey, db, nil, nil, nil, nil) s.Require().NoError(err) s.Require().NoError(m.Start()) s.manager = m @@ -107,16 +119,395 @@ func (s *ManagerSuite) TestEditCommunity() { func (s *ManagerSuite) TestGetAdminCommuniesChatIDs() { + community, _, err := s.buildCommunityWithChat() + s.Require().NoError(err) + s.Require().NotNil(community) + + adminChatIDs, err := s.manager.GetAdminCommunitiesChatIDs() + s.Require().NoError(err) + s.Require().Len(adminChatIDs, 1) +} + +func (s *ManagerSuite) TestStartAndStopTorrentClient() { + torrentConfig := buildTorrentConfig() + s.manager.SetTorrentConfig(&torrentConfig) + + err := s.manager.StartTorrentClient() + 
s.Require().NoError(err) + s.Require().NotNil(s.manager.torrentClient) + defer s.manager.StopTorrentClient() + + _, err = os.Stat(torrentConfig.DataDir) + s.Require().NoError(err) + s.Require().Equal(s.manager.TorrentClientStarted(), true) +} + +func (s *ManagerSuite) TestStartHistoryArchiveTasksInterval() { + + torrentConfig := buildTorrentConfig() + s.manager.SetTorrentConfig(&torrentConfig) + + err := s.manager.StartTorrentClient() + defer s.manager.StopTorrentClient() + + community, _, err := s.buildCommunityWithChat() + s.Require().NoError(err) + + interval := 10 * time.Second + go s.manager.StartHistoryArchiveTasksInterval(community, interval, func(id types.HexBytes) {}) + // Due to async exec we need to wait a bit until we check + // the task count. + time.Sleep(5 * time.Second) + s.Require().Len(s.manager.historyArchiveTasks, 1) + + // We wait another 5 seconds to ensure the first tick has kicked in + time.Sleep(5 * time.Second) + + _, err = os.Stat(s.manager.torrentFile(community.IDString())) + s.Require().Error(err) + + s.manager.StopHistoryArchiveTasksInterval(community.ID()) + s.manager.historyArchiveTasksWaitGroup.Wait() + s.Require().Len(s.manager.historyArchiveTasks, 0) +} + +func (s *ManagerSuite) TestStopHistoryArchiveTasksIntervals() { + + torrentConfig := buildTorrentConfig() + s.manager.SetTorrentConfig(&torrentConfig) + + err := s.manager.StartTorrentClient() + defer s.manager.StopTorrentClient() + + community, _, err := s.buildCommunityWithChat() + s.Require().NoError(err) + + interval := 10 * time.Second + go s.manager.StartHistoryArchiveTasksInterval(community, interval, func(id types.HexBytes) {}) + + time.Sleep(2 * time.Second) + s.Require().Len(s.manager.historyArchiveTasks, 1) + s.manager.StopHistoryArchiveTasksIntervals() + s.Require().Len(s.manager.historyArchiveTasks, 0) +} + +func (s *ManagerSuite) TestStopTorrentClient_ShouldStopHistoryArchiveTasks() { + torrentConfig := buildTorrentConfig() + s.manager.SetTorrentConfig(&torrentConfig) + + err := s.manager.StartTorrentClient() + defer s.manager.StopTorrentClient() + + community, _, err := s.buildCommunityWithChat() + s.Require().NoError(err) + + interval := 10 * time.Second + go s.manager.StartHistoryArchiveTasksInterval(community, interval, func(id types.HexBytes) {}) + // Due to async exec we need to wait a bit until we check + // the task count. 
+ time.Sleep(2 * time.Second) + s.Require().Len(s.manager.historyArchiveTasks, 1) + + errs := s.manager.StopTorrentClient() + s.Require().Len(errs, 0) + s.Require().Len(s.manager.historyArchiveTasks, 0) +} + +func (s *ManagerSuite) TestCreateHistoryArchiveTorrent_WithoutMessages() { + + torrentConfig := buildTorrentConfig() + s.manager.SetTorrentConfig(&torrentConfig) + + community, chatID, err := s.buildCommunityWithChat() + s.Require().NoError(err) + + topic := types.BytesToTopic(transport.ToTopic(chatID)) + topics := []types.TopicType{topic} + + // Time range of 7 days + startDate := time.Date(2020, 1, 1, 00, 00, 00, 0, time.UTC) + endDate := time.Date(2020, 1, 7, 00, 00, 00, 0, time.UTC) + // Partition of 7 days + partition := 7 * 24 * time.Hour + + err = s.manager.CreateHistoryArchiveTorrent(community.ID(), topics, startDate, endDate, partition) + s.Require().NoError(err) + + // There are no waku messages in the database so we don't expect + // any archives to be created + _, err = os.Stat(s.manager.archiveDataFile(community.IDString())) + s.Require().Error(err) + _, err = os.Stat(s.manager.archiveIndexFile(community.IDString())) + s.Require().Error(err) + _, err = os.Stat(s.manager.torrentFile(community.IDString())) + s.Require().Error(err) +} + +func (s *ManagerSuite) TestCreateHistoryArchiveTorrent_ShouldCreateArchive() { + torrentConfig := buildTorrentConfig() + s.manager.SetTorrentConfig(&torrentConfig) + + community, chatID, err := s.buildCommunityWithChat() + s.Require().NoError(err) + + topic := types.BytesToTopic(transport.ToTopic(chatID)) + topics := []types.TopicType{topic} + + // Time range of 7 days + startDate := time.Date(2020, 1, 1, 00, 00, 00, 0, time.UTC) + endDate := time.Date(2020, 1, 7, 00, 00, 00, 0, time.UTC) + // Partition of 7 days, this should create a single archive + partition := 7 * 24 * time.Hour + + message1 := buildMessage(startDate.Add(1*time.Hour), topic, []byte{1}) + message2 := buildMessage(startDate.Add(2*time.Hour), topic, []byte{2}) + // This message is outside of the startDate-endDate range and should not + // be part of the archive + message3 := buildMessage(endDate.Add(2*time.Hour), topic, []byte{3}) + + err = s.manager.StoreWakuMessage(&message1) + s.Require().NoError(err) + err = s.manager.StoreWakuMessage(&message2) + s.Require().NoError(err) + err = s.manager.StoreWakuMessage(&message3) + s.Require().NoError(err) + + err = s.manager.CreateHistoryArchiveTorrent(community.ID(), topics, startDate, endDate, partition) + s.Require().NoError(err) + + _, err = os.Stat(s.manager.archiveDataFile(community.IDString())) + s.Require().NoError(err) + _, err = os.Stat(s.manager.archiveIndexFile(community.IDString())) + s.Require().NoError(err) + _, err = os.Stat(s.manager.torrentFile(community.IDString())) + s.Require().NoError(err) + + index, err := s.manager.loadHistoryArchiveIndexFromFile(community.ID()) + s.Require().NoError(err) + s.Require().Len(index.Archives, 1) + + totalData, err := os.ReadFile(s.manager.archiveDataFile(community.IDString())) + s.Require().NoError(err) + + for _, metadata := range index.Archives { + archive := &protobuf.WakuMessageArchive{} + data := totalData[metadata.Offset : metadata.Offset+metadata.Size-metadata.Padding] + + err = proto.Unmarshal(data, archive) + s.Require().NoError(err) + + s.Require().Len(archive.Messages, 2) + } +} + +func (s *ManagerSuite) TestCreateHistoryArchiveTorrent_ShouldCreateMultipleArchives() { + torrentConfig := buildTorrentConfig() + s.manager.SetTorrentConfig(&torrentConfig) + + community, 
chatID, err := s.buildCommunityWithChat() + s.Require().NoError(err) + + topic := types.BytesToTopic(transport.ToTopic(chatID)) + topics := []types.TopicType{topic} + + // Time range of 3 weeks + startDate := time.Date(2020, 1, 1, 00, 00, 00, 0, time.UTC) + endDate := time.Date(2020, 1, 21, 00, 00, 00, 0, time.UTC) + // 7 days partition, this should create three archives + partition := 7 * 24 * time.Hour + + message1 := buildMessage(startDate.Add(1*time.Hour), topic, []byte{1}) + message2 := buildMessage(startDate.Add(2*time.Hour), topic, []byte{2}) + // We expect 2 archives to be created for startDate - endDate of each + // 7 days of data. This message should end up in the second archive + message3 := buildMessage(startDate.Add(8*24*time.Hour), topic, []byte{3}) + // This one should end up in the third archive + message4 := buildMessage(startDate.Add(14*24*time.Hour), topic, []byte{4}) + + err = s.manager.StoreWakuMessage(&message1) + s.Require().NoError(err) + err = s.manager.StoreWakuMessage(&message2) + s.Require().NoError(err) + err = s.manager.StoreWakuMessage(&message3) + s.Require().NoError(err) + err = s.manager.StoreWakuMessage(&message4) + s.Require().NoError(err) + + err = s.manager.CreateHistoryArchiveTorrent(community.ID(), topics, startDate, endDate, partition) + s.Require().NoError(err) + + index, err := s.manager.loadHistoryArchiveIndexFromFile(community.ID()) + s.Require().NoError(err) + s.Require().Len(index.Archives, 3) + + totalData, err := os.ReadFile(s.manager.archiveDataFile(community.IDString())) + s.Require().NoError(err) + + // First archive has 2 messages + // Second archive has 1 message + // Third archive has 1 message + fromMap := map[uint64]int{ + uint64(startDate.Unix()): 2, + uint64(startDate.Add(partition).Unix()): 1, + uint64(startDate.Add(partition * 2).Unix()): 1, + } + + for _, metadata := range index.Archives { + archive := &protobuf.WakuMessageArchive{} + data := totalData[metadata.Offset : metadata.Offset+metadata.Size-metadata.Padding] + + err = proto.Unmarshal(data, archive) + s.Require().NoError(err) + s.Require().Len(archive.Messages, fromMap[metadata.Metadata.From]) + } +} + +func (s *ManagerSuite) TestCreateHistoryArchiveTorrent_ShouldAppendArchives() { + torrentConfig := buildTorrentConfig() + s.manager.SetTorrentConfig(&torrentConfig) + + community, chatID, err := s.buildCommunityWithChat() + s.Require().NoError(err) + + topic := types.BytesToTopic(transport.ToTopic(chatID)) + topics := []types.TopicType{topic} + + // Time range of 1 week + startDate := time.Date(2020, 1, 1, 00, 00, 00, 0, time.UTC) + endDate := time.Date(2020, 1, 7, 00, 00, 00, 0, time.UTC) + // 7 days partition, this should create one archive + partition := 7 * 24 * time.Hour + + message1 := buildMessage(startDate.Add(1*time.Hour), topic, []byte{1}) + err = s.manager.StoreWakuMessage(&message1) + s.Require().NoError(err) + + err = s.manager.CreateHistoryArchiveTorrent(community.ID(), topics, startDate, endDate, partition) + s.Require().NoError(err) + + index, err := s.manager.loadHistoryArchiveIndexFromFile(community.ID()) + s.Require().NoError(err) + s.Require().Len(index.Archives, 1) + + // Time range of next week + startDate = time.Date(2020, 1, 7, 00, 00, 00, 0, time.UTC) + endDate = time.Date(2020, 1, 14, 00, 00, 00, 0, time.UTC) + + message2 := buildMessage(startDate.Add(2*time.Hour), topic, []byte{2}) + err = s.manager.StoreWakuMessage(&message2) + s.Require().NoError(err) + + err = s.manager.CreateHistoryArchiveTorrent(community.ID(), topics, startDate, endDate, 
partition) + s.Require().NoError(err) + + index, err = s.manager.loadHistoryArchiveIndexFromFile(community.ID()) + s.Require().NoError(err) + s.Require().Len(index.Archives, 2) +} + +func (s *ManagerSuite) TestSeedHistoryArchiveTorrent() { + torrentConfig := buildTorrentConfig() + s.manager.SetTorrentConfig(&torrentConfig) + + err := s.manager.StartTorrentClient() + defer s.manager.StopTorrentClient() + + community, chatID, err := s.buildCommunityWithChat() + s.Require().NoError(err) + + topic := types.BytesToTopic(transport.ToTopic(chatID)) + topics := []types.TopicType{topic} + + startDate := time.Date(2020, 1, 1, 00, 00, 00, 0, time.UTC) + endDate := time.Date(2020, 1, 7, 00, 00, 00, 0, time.UTC) + partition := 7 * 24 * time.Hour + + message1 := buildMessage(startDate.Add(1*time.Hour), topic, []byte{1}) + err = s.manager.StoreWakuMessage(&message1) + s.Require().NoError(err) + + err = s.manager.CreateHistoryArchiveTorrent(community.ID(), topics, startDate, endDate, partition) + s.Require().NoError(err) + + err = s.manager.SeedHistoryArchiveTorrent(community.ID()) + s.Require().NoError(err) + s.Require().Len(s.manager.torrentTasks, 1) + + metaInfoHash := s.manager.torrentTasks[community.IDString()] + torrent, ok := s.manager.torrentClient.Torrent(metaInfoHash) + defer torrent.Drop() + + s.Require().Equal(ok, true) + s.Require().Equal(torrent.Seeding(), true) +} + +func (s *ManagerSuite) TestUnseedHistoryArchiveTorrent() { + torrentConfig := buildTorrentConfig() + s.manager.SetTorrentConfig(&torrentConfig) + + err := s.manager.StartTorrentClient() + defer s.manager.StopTorrentClient() + + community, chatID, err := s.buildCommunityWithChat() + s.Require().NoError(err) + + topic := types.BytesToTopic(transport.ToTopic(chatID)) + topics := []types.TopicType{topic} + + startDate := time.Date(2020, 1, 1, 00, 00, 00, 0, time.UTC) + endDate := time.Date(2020, 1, 7, 00, 00, 00, 0, time.UTC) + partition := 7 * 24 * time.Hour + + message1 := buildMessage(startDate.Add(1*time.Hour), topic, []byte{1}) + err = s.manager.StoreWakuMessage(&message1) + s.Require().NoError(err) + + err = s.manager.CreateHistoryArchiveTorrent(community.ID(), topics, startDate, endDate, partition) + s.Require().NoError(err) + + err = s.manager.SeedHistoryArchiveTorrent(community.ID()) + s.Require().NoError(err) + s.Require().Len(s.manager.torrentTasks, 1) + + metaInfoHash := s.manager.torrentTasks[community.IDString()] + + s.manager.UnseedHistoryArchiveTorrent(community.ID()) + _, ok := s.manager.torrentClient.Torrent(metaInfoHash) + s.Require().Equal(ok, false) +} + +func buildTorrentConfig() params.TorrentConfig { + torrentConfig := params.TorrentConfig{ + Enabled: true, + DataDir: os.TempDir() + "/archivedata", + TorrentDir: os.TempDir() + "/torrents", + Port: 9999, + } + return torrentConfig +} + +func buildMessage(timestamp time.Time, topic types.TopicType, hash []byte) types.Message { + message := types.Message{ + Sig: []byte{1}, + Timestamp: uint32(timestamp.Unix()), + Topic: topic, + Payload: []byte{1}, + Padding: []byte{1}, + Hash: hash, + } + return message +} + +func (s *ManagerSuite) buildCommunityWithChat() (*Community, string, error) { createRequest := &requests.CreateCommunity{ Name: "status", Description: "status community description", Membership: protobuf.CommunityPermissions_NO_MEMBERSHIP, } - community, err := s.manager.CreateCommunity(createRequest) - s.Require().NoError(err) - s.Require().NotNil(community) - + if err != nil { + return nil, "", err + } chat := &protobuf.CommunityChat{ Identity: 
&protobuf.ChatIdentity{ DisplayName: "added-chat", @@ -127,11 +518,15 @@ func (s *ManagerSuite) TestGetAdminCommuniesChatIDs() { }, Members: make(map[string]*protobuf.CommunityMember), } + _, changes, err := s.manager.CreateChat(community.ID(), chat) + if err != nil { + return nil, "", err + } - _, _, err = s.manager.CreateChat(community.ID(), chat) - s.Require().NoError(err) - - adminChatIDs, err := s.manager.GetAdminCommunitiesChatIDs() - s.Require().NoError(err) - s.Require().Len(adminChatIDs, 1) + chatID := "" + for cID := range changes.ChatsAdded { + chatID = cID + break + } + return community, chatID, nil } diff --git a/protocol/communities/persistence.go b/protocol/communities/persistence.go index cf3ee7f4039..5b7cdf6b1d1 100644 --- a/protocol/communities/persistence.go +++ b/protocol/communities/persistence.go @@ -5,6 +5,7 @@ import ( "crypto/ecdsa" "database/sql" "errors" + "fmt" "github.com/golang/protobuf/proto" "go.uber.org/zap" @@ -22,6 +23,7 @@ type Persistence struct { var ErrOldRequestToJoin = errors.New("old request to join") +const OR = " OR " const communitiesBaseQuery = `SELECT c.id, c.private_key, c.description,c.joined,c.verified,c.muted,r.clock FROM communities_communities c LEFT JOIN communities_requests_to_join r ON c.id = r.community_id AND r.public_key = ?` func (p *Persistence) SaveCommunity(community *Community) error { @@ -359,6 +361,105 @@ func (p *Persistence) SaveWakuMessage(message *types.Message) error { return err } +func wakuMessageTimestampQuery(topics []types.TopicType) string { + query := " FROM waku_messages WHERE " + for i, topic := range topics { + query += `topic = "` + topic.String() + `"` + if i < len(topics)-1 { + query += OR + } + } + return query +} + +func (p *Persistence) GetOldestWakuMessageTimestamp(topics []types.TopicType) (uint64, error) { + var timestamp sql.NullInt64 + query := "SELECT MIN(timestamp)" + query += wakuMessageTimestampQuery(topics) + err := p.db.QueryRow(query).Scan(×tamp) + return uint64(timestamp.Int64), err +} + +func (p *Persistence) GetLatestWakuMessageTimestamp(topics []types.TopicType) (uint64, error) { + var timestamp sql.NullInt64 + query := "SELECT MAX(timestamp)" + query += wakuMessageTimestampQuery(topics) + err := p.db.QueryRow(query).Scan(×tamp) + return uint64(timestamp.Int64), err +} + +func (p *Persistence) GetWakuMessagesByFilterTopic(topics []types.TopicType, from uint64, to uint64) ([]types.Message, error) { + + query := "SELECT sig, timestamp, topic, payload, padding, hash FROM waku_messages WHERE timestamp >= " + fmt.Sprint(from) + " AND timestamp < " + fmt.Sprint(to) + " AND (" + + for i, topic := range topics { + query += `topic = "` + topic.String() + `"` + if i < len(topics)-1 { + query += OR + } + } + query += ")" + + rows, err := p.db.Query(query) + if err != nil { + return nil, err + } + defer rows.Close() + messages := []types.Message{} + + for rows.Next() { + msg := types.Message{} + var topicStr string + var hashStr string + err := rows.Scan(&msg.Sig, &msg.Timestamp, &topicStr, &msg.Payload, &msg.Padding, &hashStr) + if err != nil { + return nil, err + } + msg.Topic = types.StringToTopic(topicStr) + msg.Hash = types.Hex2Bytes(hashStr) + messages = append(messages, msg) + } + + return messages, nil +} + +func (p *Persistence) UpdateMagnetlinkMessageClock(communityID types.HexBytes, clock uint64) error { + _, err := p.db.Exec(`UPDATE communities_archive_info SET + magnetlink_clock = ? 
+ WHERE community_id = ?`, + clock, + communityID.String()) + return err +} + +func (p *Persistence) SaveLastMessageArchiveEndDate(communityID types.HexBytes, endDate uint64) error { + _, err := p.db.Exec(`INSERT INTO communities_archive_info (last_message_archive_end_date, community_id) VALUES (?, ?)`, + endDate, + communityID.String()) + return err +} + +func (p *Persistence) UpdateLastMessageArchiveEndDate(communityID types.HexBytes, endDate uint64) error { + _, err := p.db.Exec(`UPDATE communities_archive_info SET + last_message_archive_end_date = ? + WHERE community_id = ?`, + endDate, + communityID.String()) + return err +} + +func (p *Persistence) GetLastMessageArchiveEndDate(communityID types.HexBytes) (uint64, error) { + + var lastMessageArchiveEndDate uint64 + err := p.db.QueryRow(`SELECT last_message_archive_end_date FROM communities_archive_info WHERE community_id = ?`, communityID.String()).Scan(&lastMessageArchiveEndDate) + if err == sql.ErrNoRows { + return 0, nil + } else if err != nil { + return 0, err + } + return lastMessageArchiveEndDate, nil +} + func (p *Persistence) GetCommunitiesSettings() ([]CommunitySettings, error) { rows, err := p.db.Query("SELECT community_id, message_archive_seeding_enabled, message_archive_fetching_enabled FROM communities_settings") if err != nil { @@ -427,3 +528,22 @@ func (p *Persistence) UpdateCommunitySettings(communitySettings CommunitySetting ) return err } + +func (p *Persistence) GetCommunityChatIDs(communityID types.HexBytes) ([]string, error) { + rows, err := p.db.Query(`SELECT id FROM chats WHERE community_id = ?`, communityID.String()) + if err != nil { + return nil, err + } + defer rows.Close() + + ids := []string{} + for rows.Next() { + id := "" + err := rows.Scan(&id) + if err != nil { + return nil, err + } + ids = append(ids, id) + } + return ids, nil +} diff --git a/protocol/messenger.go b/protocol/messenger.go index 988b8a408d7..671967b5629 100644 --- a/protocol/messenger.go +++ b/protocol/messenger.go @@ -149,10 +149,11 @@ type peerStatus struct { } type mailserverCycle struct { sync.RWMutex - activeMailserver *mailserversDB.Mailserver - peers map[string]peerStatus - events chan *p2p.PeerEvent - subscription event.Subscription + activeMailserver *mailserversDB.Mailserver + peers map[string]peerStatus + events chan *p2p.PeerEvent + subscription event.Subscription + availabilitySubscriptions []chan struct{} } type dbConfig struct { @@ -378,7 +379,7 @@ func NewMessenger( ensVerifier := ens.New(node, logger, transp, database, c.verifyENSURL, c.verifyENSContractAddress) - communitiesManager, err := communities.NewManager(&identity.PublicKey, database, logger, ensVerifier) + communitiesManager, err := communities.NewManager(&identity.PublicKey, database, logger, ensVerifier, transp, c.torrentConfig) if err != nil { return nil, err } @@ -420,7 +421,8 @@ func NewMessenger( peerStore: peerStore, mailservers: mailservers, mailserverCycle: mailserverCycle{ - peers: make(map[string]peerStatus), + peers: make(map[string]peerStatus), + availabilitySubscriptions: make([]chan struct{}, 0), }, mailserversDatabase: c.mailserversDatabase, account: c.account, @@ -651,6 +653,17 @@ func (m *Messenger) Start() (*MessengerResponse, error) { return nil, err } + if m.config.torrentConfig.Enabled { + adminCommunities, err := m.communitiesManager.Created() + if err == nil && len(adminCommunities) > 0 { + available := m.SubscribeMailserverAvailable() + go func() { + <-available + m.InitHistoryArchiveTasks(adminCommunities) + }() + } + } + err = 
m.httpServer.Start() if err != nil { return nil, err diff --git a/protocol/messenger_communities.go b/protocol/messenger_communities.go index 8eba03f1b31..35587b343bb 100644 --- a/protocol/messenger_communities.go +++ b/protocol/messenger_communities.go @@ -4,6 +4,7 @@ import ( "context" "crypto/ecdsa" "fmt" + "log" "time" "github.com/golang/protobuf/proto" @@ -18,6 +19,8 @@ import ( "github.com/status-im/status-go/protocol/transport" ) +var messageArchiveInterval = 5 * time.Minute + func (m *Messenger) publishOrg(org *communities.Community) error { m.logger.Debug("publishing org", zap.String("org-id", org.IDString()), zap.Any("org", org)) payload, err := org.MarshaledDescription() @@ -417,6 +420,8 @@ func (m *Messenger) LeaveCommunity(communityID types.HexBytes) (*MessengerRespon return nil, err } + m.communitiesManager.StopHistoryArchiveTasksInterval(communityID) + if com, ok := mr.communities[communityID.String()]; ok { err = m.syncCommunity(context.Background(), com) if err != nil { @@ -592,6 +597,15 @@ func (m *Messenger) CreateCommunity(request *requests.CreateCommunity) (*Messeng return nil, err } + if m.config.torrentConfig.Enabled && communitySettings.HistoryArchiveSupportEnabled { + go m.communitiesManager.StartHistoryArchiveTasksInterval(community, messageArchiveInterval, func(communityID types.HexBytes) { + err := m.dispatchMagnetlinkMessage(communityID) + if err != nil { + m.logger.Debug("failed to dispatch magnetlink message", zap.Error(err)) + } + }) + } + return response, nil } @@ -614,6 +628,18 @@ func (m *Messenger) EditCommunity(request *requests.EditCommunity) (*MessengerRe return nil, err } + id := community.ID() + + if m.config.torrentConfig.Enabled { + if !communitySettings.HistoryArchiveSupportEnabled { + m.communitiesManager.StopHistoryArchiveTasksInterval(id) + } else if !m.communitiesManager.IsSeedingHistoryArchiveTorrent(id) { + var communities []*communities.Community + communities = append(communities, community) + go m.InitHistoryArchiveTasks(communities) + } + } + response := &MessengerResponse{} response.AddCommunity(community) response.AddCommunitySettings(&communitySettings) @@ -652,6 +678,11 @@ func (m *Messenger) ImportCommunity(ctx context.Context, key *ecdsa.PrivateKey) return nil, err } + if m.config.torrentConfig.Enabled { + var communities []*communities.Community + communities = append(communities, community) + go m.InitHistoryArchiveTasks(communities) + } return response, nil } @@ -1098,6 +1129,226 @@ func (m *Messenger) handleSyncCommunity(messageState *ReceivedMessageState, sync return nil } +func (m *Messenger) InitHistoryArchiveTasks(communities []*communities.Community) { + + dispatchMagnetlink := func(communityID types.HexBytes) { + err := m.dispatchMagnetlinkMessage(communityID) + if err != nil { + m.logger.Debug("failed to dispatch magnetlink message", zap.Error(err)) + } + } + + for _, c := range communities { + + if c.Joined() { + settings, err := m.communitiesManager.GetCommunitySettingsByID(c.ID()) + if err != nil { + log.Println("failed to get community settings", err) + m.logger.Debug("failed to get community settings", zap.Error(err)) + continue + } + if !settings.HistoryArchiveSupportEnabled { + continue + } + + filters, err := m.communitiesManager.GetCommunityChatsFilters(c.ID()) + if err != nil { + log.Println("failed to get community chats filters", err) + m.logger.Debug("failed to get community chats filters", zap.Error(err)) + continue + } + + if len(filters) == 0 { + log.Println("no filters or chats for this community", 
c.IDString()) + m.logger.Debug("no filters or chats for this community") + continue + } + + topics := []types.TopicType{} + + for _, filter := range filters { + topics = append(topics, filter.Topic) + } + + // First we need to know the timestamp of the latest waku message + // we've received for this community, so we can request messages we've + // possibly missed since then + latestWakuMessageTimestamp, err := m.communitiesManager.GetLatestWakuMessageTimestamp(topics) + if err != nil { + log.Println("failed to get Latest waku message timestamp", err) + m.logger.Debug("failed to get Latest waku message timestamp", zap.Error(err)) + continue + } + + if latestWakuMessageTimestamp == 0 { + // This means we don't have any waku messages for this community + // yet, either because no messages were sent in the community so far, + // or because messages haven't reached this node + // + // In this case we default to requesting messages from the store nodes + // for the past 30 days + latestWakuMessageTimestamp = uint64(time.Now().AddDate(0, 0, -30).Unix()) + } + + // Request possibly missed waku messages for community + _, err = m.syncFiltersFrom(filters, uint32(latestWakuMessageTimestamp)) + if err != nil { + log.Println("failed to request missing messages", err) + m.logger.Debug("failed to request missing messages", zap.Error(err)) + continue + } + + // We figure out the end date of the last created archive and schedule + // the interval for creating future archives + // If the last end date is at least `interval` ago, we create an archive immediately first + lastArchiveEndDateTimestamp, err := m.communitiesManager.GetHistoryArchivePartitionStartTimestamp(c.ID()) + if err != nil { + log.Println("failed to get archive partition start timestamp", err) + m.logger.Debug("failed to get archive partition start timestamp", zap.Error(err)) + continue + } + + to := time.Now() + lastArchiveEndDate := time.Unix(int64(lastArchiveEndDateTimestamp), 0) + log.Println("Last archive end date: ", lastArchiveEndDate) + durationSinceLastArchive := to.Sub(lastArchiveEndDate) + + if lastArchiveEndDateTimestamp == 0 { + // No prior messages to be archived, so we just kick off the archive creation loop + // for future archives + go m.communitiesManager.StartHistoryArchiveTasksInterval(c, messageArchiveInterval, dispatchMagnetlink) + } else if durationSinceLastArchive < messageArchiveInterval { + // Last archive is less than `interval` old, wait until `interval` is complete, + // then create archive and kick off archive creation loop for future archives + // Seed current archive in the meantime + err := m.communitiesManager.SeedHistoryArchiveTorrent(c.ID()) + if err != nil { + m.logger.Debug("failed to seed history archive", zap.Error(err)) + } + timeToNextInterval := messageArchiveInterval - durationSinceLastArchive + + log.Println("Starting history archive tasks interval in: ", timeToNextInterval) + time.AfterFunc(timeToNextInterval, func() { + err := m.communitiesManager.CreateAndSeedHistoryArchive(c.ID(), topics, lastArchiveEndDate, to.Add(timeToNextInterval), messageArchiveInterval) + if err != nil { + m.logger.Debug("failed to get create and seed history archive", zap.Error(err)) + } + dispatchMagnetlink(c.ID()) + go m.communitiesManager.StartHistoryArchiveTasksInterval(c, messageArchiveInterval, dispatchMagnetlink) + }) + } else { + // Looks like the last archive was generated more than `interval` + // ago, so lets create a new archive now and then schedule the archive + // creation loop + err := 
m.communitiesManager.CreateAndSeedHistoryArchive(c.ID(), topics, lastArchiveEndDate, to, messageArchiveInterval) + if err != nil { + m.logger.Debug("failed to get create and seed history archive", zap.Error(err)) + } + dispatchMagnetlink(c.ID()) + + go m.communitiesManager.StartHistoryArchiveTasksInterval(c, messageArchiveInterval, dispatchMagnetlink) + } + } + } +} + +func (m *Messenger) dispatchMagnetlinkMessage(communityID types.HexBytes) error { + + community, err := m.communitiesManager.GetByIDString(communityID.String()) + if err != nil { + return err + } + + magnetlink, err := m.communitiesManager.GetHistoryArchiveMagnetlink(communityID) + if err != nil { + return err + } + + magnetLinkMessage := &protobuf.CommunityMessageArchiveMagnetlink{ + Clock: m.getTimesource().GetCurrentTime(), + MagnetUri: magnetlink, + } + + encodedMessage, err := proto.Marshal(magnetLinkMessage) + if err != nil { + return err + } + + chatID := community.MagnetlinkMessageChannelID() + rawMessage := common.RawMessage{ + LocalChatID: chatID, + Sender: community.PrivateKey(), + Payload: encodedMessage, + MessageType: protobuf.ApplicationMetadataMessage_COMMUNITY_ARCHIVE_MAGNETLINK, + SkipGroupMessageWrap: true, + } + + log.Println("Dispatching magnet link for community ", community.IDString()) + _, err = m.sender.SendPublic(context.Background(), chatID, rawMessage) + if err != nil { + return err + } + + err = m.communitiesManager.UpdateMagnetlinkMessageClock(communityID, magnetLinkMessage.Clock) + return err +} + +func (m *Messenger) EnableCommunityHistoryArchiveProtocol() error { + nodeConfig, err := m.settings.GetNodeConfig() + if err != nil { + return err + } + + if nodeConfig.TorrentConfig.Enabled { + return nil + } + + nodeConfig.TorrentConfig.Enabled = true + err = m.settings.SaveSetting("node-config", nodeConfig) + if err != nil { + return err + } + + m.config.torrentConfig = &nodeConfig.TorrentConfig + m.communitiesManager.SetTorrentConfig(&nodeConfig.TorrentConfig) + err = m.communitiesManager.StartTorrentClient() + if err != nil { + return err + } + + communities, err := m.communitiesManager.Created() + if err != nil { + return err + } + + if len(communities) > 0 { + go m.InitHistoryArchiveTasks(communities) + } + return nil +} + +func (m *Messenger) DisableCommunityHistoryArchiveProtocol() error { + + nodeConfig, err := m.settings.GetNodeConfig() + if err != nil { + return err + } + if !nodeConfig.TorrentConfig.Enabled { + return nil + } + + m.communitiesManager.StopTorrentClient() + + nodeConfig.TorrentConfig.Enabled = false + err = m.settings.SaveSetting("node-config", nodeConfig) + m.config.torrentConfig = &nodeConfig.TorrentConfig + m.communitiesManager.SetTorrentConfig(&nodeConfig.TorrentConfig) + if err != nil { + return err + } + return nil +} + func (m *Messenger) GetCommunitiesSettings() ([]communities.CommunitySettings, error) { settings, err := m.communitiesManager.GetCommunitiesSettings() if err != nil { diff --git a/protocol/messenger_mailserver.go b/protocol/messenger_mailserver.go index 34e3d0262f9..29e77d8a720 100644 --- a/protocol/messenger_mailserver.go +++ b/protocol/messenger_mailserver.go @@ -332,7 +332,7 @@ func getPrioritizedBatches() []int { return []int{1, 5, 10} } -func (m *Messenger) syncFilters(filters []*transport.Filter) (*MessengerResponse, error) { +func (m *Messenger) syncFiltersFrom(filters []*transport.Filter, lastRequest uint32) (*MessengerResponse, error) { response := &MessengerResponse{} topicInfo, err := m.mailserversDatabase.Topics() if err != nil { @@ 
-380,11 +380,18 @@ func (m *Messenger) syncFilters(filters []*transport.Filter) (*MessengerResponse } topicData, ok := topicsData[filter.Topic.String()] + var capToDefaultSyncPeriod = true if !ok { + if lastRequest == 0 { + lastRequest = defaultPeriodFromNow + } topicData = mailservers.MailserverTopic{ Topic: filter.Topic.String(), LastRequest: int(defaultPeriodFromNow), } + } else if lastRequest != 0 { + topicData.LastRequest = int(lastRequest) + capToDefaultSyncPeriod = false } batchID := topicData.LastRequest @@ -411,11 +418,13 @@ func (m *Messenger) syncFilters(filters []*transport.Filter) (*MessengerResponse batch, ok := batches[batchID] if !ok { - from, err := m.capToDefaultSyncPeriod(uint32(topicData.LastRequest)) - if err != nil { - return nil, err + from := uint32(topicData.LastRequest) + if capToDefaultSyncPeriod { + from, err = m.capToDefaultSyncPeriod(uint32(topicData.LastRequest)) + if err != nil { + return nil, err + } } - batch = MailserverBatch{From: from, To: to} } @@ -508,6 +517,10 @@ func (m *Messenger) syncFilters(filters []*transport.Filter) (*MessengerResponse return response, nil } +func (m *Messenger) syncFilters(filters []*transport.Filter) (*MessengerResponse, error) { + return m.syncFiltersFrom(filters, 0) +} + func (m *Messenger) calculateGapForChat(chat *Chat, from uint32) (*common.Message, error) { // Chat was never synced, no gap necessary if chat.SyncedTo == 0 { diff --git a/protocol/messenger_mailserver_cycle.go b/protocol/messenger_mailserver_cycle.go index 724c84dbe83..d885e17c0a4 100644 --- a/protocol/messenger_mailserver_cycle.go +++ b/protocol/messenger_mailserver_cycle.go @@ -486,6 +486,7 @@ func (m *Messenger) handleMailserverCycleEvent(connectedPeers []ConnectedPeer) e if m.mailserverCycle.activeMailserver != nil && id == m.mailserverCycle.activeMailserver.ID { m.logger.Info("mailserver available", zap.String("address", connectedPeer.UniqueID)) + m.EmitMailserverAvailable() signal.SendMailserverAvailable(m.mailserverCycle.activeMailserver.Address, m.mailserverCycle.activeMailserver.ID) } // Query mailserver @@ -650,3 +651,18 @@ func (m *Messenger) getPinnedMailserver() (*mailservers.Mailserver, error) { return nil, nil } + +func (m *Messenger) EmitMailserverAvailable() { + for _, s := range m.mailserverCycle.availabilitySubscriptions { + s <- struct{}{} + close(s) + l := len(m.mailserverCycle.availabilitySubscriptions) + m.mailserverCycle.availabilitySubscriptions = m.mailserverCycle.availabilitySubscriptions[:l-1] + } +} + +func (m *Messenger) SubscribeMailserverAvailable() chan struct{} { + c := make(chan struct{}) + m.mailserverCycle.availabilitySubscriptions = append(m.mailserverCycle.availabilitySubscriptions, c) + return c +} diff --git a/protocol/protobuf/application_metadata_message.pb.go b/protocol/protobuf/application_metadata_message.pb.go index 727da32bc57..05ed522336f 100644 --- a/protocol/protobuf/application_metadata_message.pb.go +++ b/protocol/protobuf/application_metadata_message.pb.go @@ -65,6 +65,7 @@ const ( ApplicationMetadataMessage_SYNC_ACTIVITY_CENTER_DISMISSED ApplicationMetadataMessage_Type = 39 ApplicationMetadataMessage_SYNC_BOOKMARK ApplicationMetadataMessage_Type = 40 ApplicationMetadataMessage_SYNC_CLEAR_HISTORY ApplicationMetadataMessage_Type = 41 + ApplicationMetadataMessage_COMMUNITY_ARCHIVE_MAGNETLINK ApplicationMetadataMessage_Type = 42 ) var ApplicationMetadataMessage_Type_name = map[int32]string{ @@ -110,6 +111,7 @@ var ApplicationMetadataMessage_Type_name = map[int32]string{ 39: 
"SYNC_ACTIVITY_CENTER_DISMISSED", 40: "SYNC_BOOKMARK", 41: "SYNC_CLEAR_HISTORY", + 42: "COMMUNITY_ARCHIVE_MAGNETLINK", } var ApplicationMetadataMessage_Type_value = map[string]int32{ @@ -155,6 +157,7 @@ var ApplicationMetadataMessage_Type_value = map[string]int32{ "SYNC_ACTIVITY_CENTER_DISMISSED": 39, "SYNC_BOOKMARK": 40, "SYNC_CLEAR_HISTORY": 41, + "COMMUNITY_ARCHIVE_MAGNETLINK": 42, } func (x ApplicationMetadataMessage_Type) String() string { @@ -233,49 +236,51 @@ func init() { } var fileDescriptor_ad09a6406fcf24c7 = []byte{ - // 704 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x54, 0xdb, 0x52, 0x23, 0x37, - 0x10, 0x8d, 0x77, 0x09, 0x2c, 0x6d, 0x60, 0x85, 0x96, 0x8b, 0x31, 0x0b, 0x78, 0xbd, 0x1b, 0x2e, - 0x49, 0x95, 0x53, 0x95, 0x3c, 0xa6, 0xf2, 0x20, 0x4b, 0x0d, 0x16, 0xf6, 0x48, 0x83, 0xa4, 0x71, - 0xca, 0x79, 0x51, 0x0d, 0xc1, 0xa1, 0xa8, 0x02, 0xec, 0x02, 0xf3, 0xc0, 0x8f, 0xe4, 0x2b, 0xf2, - 0x91, 0x29, 0xcd, 0xf8, 0x06, 0x98, 0xf0, 0x64, 0xab, 0xcf, 0x51, 0xb7, 0xfa, 0xf4, 0xe9, 0x81, - 0x6a, 0xda, 0xef, 0x5f, 0x5f, 0xfd, 0x95, 0x0e, 0xae, 0x7a, 0xb7, 0xfe, 0xa6, 0x3b, 0x48, 0x2f, - 0xd2, 0x41, 0xea, 0x6f, 0xba, 0xf7, 0xf7, 0xe9, 0x65, 0xb7, 0xd6, 0xbf, 0xeb, 0x0d, 0x7a, 0xf4, - 0x43, 0xf6, 0x73, 0xfe, 0xf0, 0x77, 0xf5, 0x5f, 0x80, 0x32, 0x9b, 0x5c, 0x88, 0x86, 0xfc, 0x28, - 0xa7, 0xd3, 0xcf, 0xb0, 0x78, 0x7f, 0x75, 0x79, 0x9b, 0x0e, 0x1e, 0xee, 0xba, 0xa5, 0x42, 0xa5, - 0x70, 0xb8, 0x64, 0x26, 0x01, 0x5a, 0x82, 0x85, 0x7e, 0xfa, 0x78, 0xdd, 0x4b, 0x2f, 0x4a, 0xef, - 0x32, 0x6c, 0x74, 0xa4, 0xbf, 0xc3, 0xdc, 0xe0, 0xb1, 0xdf, 0x2d, 0xbd, 0xaf, 0x14, 0x0e, 0x57, - 0x7e, 0x39, 0xaa, 0x8d, 0xea, 0xd5, 0x5e, 0xaf, 0x55, 0x73, 0x8f, 0xfd, 0xae, 0xc9, 0xae, 0x55, - 0xff, 0x59, 0x84, 0xb9, 0x70, 0xa4, 0x45, 0x58, 0x48, 0x54, 0x53, 0xe9, 0x3f, 0x14, 0xf9, 0x8e, - 0x12, 0x58, 0xe2, 0x0d, 0xe6, 0x7c, 0x84, 0xd6, 0xb2, 0x13, 0x24, 0x05, 0x4a, 0x61, 0x85, 0x6b, - 0xe5, 0x18, 0x77, 0x3e, 0x89, 0x05, 0x73, 0x48, 0xde, 0xd1, 0x1d, 0xd8, 0x8a, 0x30, 0xaa, 0xa3, - 0xb1, 0x0d, 0x19, 0x0f, 0xc3, 0xe3, 0x2b, 0xef, 0xe9, 0x3a, 0xac, 0xc6, 0x4c, 0x1a, 0x2f, 0x95, - 0x75, 0xac, 0xd5, 0x62, 0x4e, 0x6a, 0x45, 0xe6, 0x42, 0xd8, 0x76, 0x14, 0x7f, 0x1a, 0xfe, 0x9e, - 0x7e, 0x85, 0x3d, 0x83, 0x67, 0x09, 0x5a, 0xe7, 0x99, 0x10, 0x06, 0xad, 0xf5, 0xc7, 0xda, 0x78, - 0x67, 0x98, 0xb2, 0x8c, 0x67, 0xa4, 0x79, 0xfa, 0x23, 0xec, 0x33, 0xce, 0x31, 0x76, 0xfe, 0x2d, - 0xee, 0x02, 0xfd, 0x09, 0x0e, 0x04, 0xf2, 0x96, 0x54, 0xf8, 0x26, 0xf9, 0x03, 0xdd, 0x84, 0x4f, - 0x23, 0xd2, 0x34, 0xb0, 0x48, 0xd7, 0x80, 0x58, 0x54, 0xe2, 0x49, 0x14, 0xe8, 0x1e, 0x6c, 0x3f, - 0xcf, 0x3d, 0x4d, 0x28, 0x06, 0x69, 0x5e, 0x34, 0xe9, 0x87, 0x02, 0x92, 0xa5, 0xd9, 0x30, 0xe3, - 0x5c, 0x27, 0xca, 0x91, 0x65, 0xfa, 0x05, 0x76, 0x5e, 0xc2, 0x71, 0x52, 0x6f, 0x49, 0xee, 0xc3, - 0x5c, 0xc8, 0x0a, 0xdd, 0x85, 0xf2, 0x68, 0x1e, 0x5c, 0x0b, 0xf4, 0x4c, 0xb4, 0xd1, 0x38, 0x69, - 0x31, 0x42, 0xe5, 0xc8, 0x47, 0x5a, 0x85, 0xdd, 0x38, 0xb1, 0x0d, 0xaf, 0xb4, 0x93, 0xc7, 0x92, - 0xe7, 0x29, 0x0c, 0x9e, 0x48, 0xeb, 0x4c, 0x2e, 0x39, 0x09, 0x0a, 0xfd, 0x3f, 0xc7, 0x1b, 0xb4, - 0xb1, 0x56, 0x16, 0xc9, 0x2a, 0xdd, 0x86, 0xcd, 0x97, 0xe4, 0xb3, 0x04, 0x4d, 0x87, 0x50, 0xfa, - 0x0d, 0x2a, 0xaf, 0x80, 0x93, 0x14, 0x9f, 0x42, 0xd7, 0xb3, 0xea, 0x65, 0xfa, 0x91, 0xb5, 0xd0, - 0xd2, 0x2c, 0x78, 0x78, 0x7d, 0x3d, 0x58, 0x10, 0x23, 0x7d, 0x2a, 0xbd, 0xc1, 0xa1, 0xce, 0x1b, - 0x74, 0x0b, 0xd6, 0x4f, 0x8c, 0x4e, 0xe2, 0x4c, 0x16, 0x2f, 0x55, 0x5b, 0xba, 0xbc, 0xbb, 0x4d, - 0xba, 0x0a, 0xcb, 0x79, 0x50, 
0xa0, 0x72, 0xd2, 0x75, 0x48, 0x29, 0xb0, 0xb9, 0x8e, 0xa2, 0x44, - 0x49, 0xd7, 0xf1, 0x02, 0x2d, 0x37, 0x32, 0xce, 0xd8, 0x5b, 0xb4, 0x04, 0x6b, 0x13, 0x68, 0x2a, - 0x4f, 0x39, 0xbc, 0x7a, 0x82, 0x8c, 0xa7, 0xad, 0xfd, 0xa9, 0x96, 0x8a, 0x6c, 0xd3, 0x8f, 0x50, - 0x8c, 0xa5, 0x1a, 0xdb, 0xfe, 0x73, 0xd8, 0x1d, 0x14, 0x72, 0xb2, 0x3b, 0x3b, 0xe1, 0x25, 0xd6, - 0x31, 0x97, 0xd8, 0xd1, 0xea, 0xec, 0x86, 0x5e, 0x04, 0xb6, 0x70, 0x6a, 0x5f, 0xf6, 0x82, 0xa9, - 0x66, 0x79, 0x66, 0x58, 0x9a, 0x54, 0x68, 0x19, 0x36, 0x98, 0xd2, 0xaa, 0x13, 0xe9, 0xc4, 0xfa, - 0x08, 0x9d, 0x91, 0xdc, 0xd7, 0x99, 0xe3, 0x0d, 0xf2, 0x65, 0xbc, 0x55, 0x59, 0xcb, 0x06, 0x23, - 0xdd, 0x46, 0x41, 0xaa, 0x61, 0x6a, 0x93, 0xf0, 0xb0, 0x94, 0x0d, 0x02, 0x0a, 0xf2, 0x95, 0x02, - 0xcc, 0xd7, 0x19, 0x6f, 0x26, 0x31, 0xf9, 0x36, 0x76, 0x64, 0x50, 0xb6, 0x1d, 0x3a, 0xe5, 0xa8, - 0x1c, 0x9a, 0x9c, 0xfa, 0xc3, 0xd8, 0x91, 0xcf, 0xe1, 0x7c, 0x1b, 0x51, 0x90, 0xfd, 0xe0, 0xb8, - 0x99, 0x14, 0x21, 0x6d, 0x24, 0xad, 0x45, 0x41, 0x0e, 0x32, 0x25, 0x02, 0xa7, 0xae, 0x75, 0x33, - 0x62, 0xa6, 0x49, 0x0e, 0xe9, 0x06, 0xd0, 0xfc, 0x85, 0x2d, 0x64, 0xc6, 0x37, 0xa4, 0x75, 0xda, - 0x74, 0xc8, 0x51, 0x7d, 0xf9, 0xcf, 0x62, 0xed, 0xe7, 0xdf, 0x46, 0x5f, 0xb3, 0xf3, 0xf9, 0xec, - 0xdf, 0xaf, 0xff, 0x05, 0x00, 0x00, 0xff, 0xff, 0x84, 0x31, 0xd9, 0x7d, 0x74, 0x05, 0x00, 0x00, + // 725 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x54, 0x5d, 0x53, 0x1b, 0x37, + 0x14, 0xad, 0x13, 0x0a, 0xe1, 0x1a, 0x88, 0x50, 0xf8, 0x30, 0xe6, 0xcb, 0x71, 0xd2, 0x84, 0xa4, + 0x33, 0xee, 0x4c, 0xfb, 0xd8, 0xe9, 0x83, 0x2c, 0xdd, 0xd8, 0x8a, 0xbd, 0xd2, 0x46, 0xd2, 0xba, + 0xe3, 0xbe, 0x68, 0x36, 0x8d, 0xcb, 0x30, 0x03, 0xd8, 0x03, 0xe6, 0x81, 0x7f, 0xd6, 0x5f, 0xd1, + 0xdf, 0xd4, 0xd1, 0xae, 0xed, 0x35, 0x60, 0xc2, 0x93, 0xad, 0x7b, 0x8e, 0xee, 0xd5, 0x3d, 0xf7, + 0xdc, 0x85, 0x7a, 0x3a, 0x1a, 0x9d, 0x9f, 0xfd, 0x9d, 0x8e, 0xcf, 0x86, 0x97, 0xfe, 0x62, 0x30, + 0x4e, 0xbf, 0xa5, 0xe3, 0xd4, 0x5f, 0x0c, 0xae, 0xaf, 0xd3, 0xd3, 0x41, 0x63, 0x74, 0x35, 0x1c, + 0x0f, 0xe9, 0x8b, 0xec, 0xe7, 0xeb, 0xcd, 0x3f, 0xf5, 0xff, 0x00, 0xaa, 0xac, 0xb8, 0x10, 0x4d, + 0xf8, 0x51, 0x4e, 0xa7, 0x07, 0xb0, 0x7a, 0x7d, 0x76, 0x7a, 0x99, 0x8e, 0x6f, 0xae, 0x06, 0x95, + 0x52, 0xad, 0x74, 0xb2, 0x66, 0x8a, 0x00, 0xad, 0xc0, 0xca, 0x28, 0xbd, 0x3d, 0x1f, 0xa6, 0xdf, + 0x2a, 0xcf, 0x32, 0x6c, 0x7a, 0xa4, 0x7f, 0xc0, 0xd2, 0xf8, 0x76, 0x34, 0xa8, 0x3c, 0xaf, 0x95, + 0x4e, 0x36, 0x7e, 0xfd, 0xd0, 0x98, 0xd6, 0x6b, 0x3c, 0x5e, 0xab, 0xe1, 0x6e, 0x47, 0x03, 0x93, + 0x5d, 0xab, 0xff, 0xbb, 0x0a, 0x4b, 0xe1, 0x48, 0xcb, 0xb0, 0x92, 0xa8, 0x8e, 0xd2, 0x7f, 0x2a, + 0xf2, 0x03, 0x25, 0xb0, 0xc6, 0xdb, 0xcc, 0xf9, 0x08, 0xad, 0x65, 0x2d, 0x24, 0x25, 0x4a, 0x61, + 0x83, 0x6b, 0xe5, 0x18, 0x77, 0x3e, 0x89, 0x05, 0x73, 0x48, 0x9e, 0xd1, 0x43, 0xd8, 0x8b, 0x30, + 0x6a, 0xa2, 0xb1, 0x6d, 0x19, 0x4f, 0xc2, 0xb3, 0x2b, 0xcf, 0xe9, 0x36, 0x6c, 0xc6, 0x4c, 0x1a, + 0x2f, 0x95, 0x75, 0xac, 0xdb, 0x65, 0x4e, 0x6a, 0x45, 0x96, 0x42, 0xd8, 0xf6, 0x15, 0xbf, 0x1b, + 0xfe, 0x91, 0xbe, 0x81, 0x63, 0x83, 0x5f, 0x12, 0xb4, 0xce, 0x33, 0x21, 0x0c, 0x5a, 0xeb, 0x3f, + 0x69, 0xe3, 0x9d, 0x61, 0xca, 0x32, 0x9e, 0x91, 0x96, 0xe9, 0x47, 0x78, 0xc7, 0x38, 0xc7, 0xd8, + 0xf9, 0xa7, 0xb8, 0x2b, 0xf4, 0x67, 0x78, 0x2f, 0x90, 0x77, 0xa5, 0xc2, 0x27, 0xc9, 0x2f, 0xe8, + 0x2e, 0xbc, 0x9a, 0x92, 0xe6, 0x81, 0x55, 0xba, 0x05, 0xc4, 0xa2, 0x12, 0x77, 0xa2, 0x40, 0x8f, + 0x61, 0xff, 0x7e, 0xee, 0x79, 0x42, 0x39, 0x48, 0xf3, 0xa0, 0x49, 0x3f, 0x11, 0x90, 0xac, 0x2d, + 0x86, 
0x19, 0xe7, 0x3a, 0x51, 0x8e, 0xac, 0xd3, 0xd7, 0x70, 0xf8, 0x10, 0x8e, 0x93, 0x66, 0x57, + 0x72, 0x1f, 0xe6, 0x42, 0x36, 0xe8, 0x11, 0x54, 0xa7, 0xf3, 0xe0, 0x5a, 0xa0, 0x67, 0xa2, 0x87, + 0xc6, 0x49, 0x8b, 0x11, 0x2a, 0x47, 0x5e, 0xd2, 0x3a, 0x1c, 0xc5, 0x89, 0x6d, 0x7b, 0xa5, 0x9d, + 0xfc, 0x24, 0x79, 0x9e, 0xc2, 0x60, 0x4b, 0x5a, 0x67, 0x72, 0xc9, 0x49, 0x50, 0xe8, 0xfb, 0x1c, + 0x6f, 0xd0, 0xc6, 0x5a, 0x59, 0x24, 0x9b, 0x74, 0x1f, 0x76, 0x1f, 0x92, 0xbf, 0x24, 0x68, 0xfa, + 0x84, 0xd2, 0xb7, 0x50, 0x7b, 0x04, 0x2c, 0x52, 0xbc, 0x0a, 0x5d, 0x2f, 0xaa, 0x97, 0xe9, 0x47, + 0xb6, 0x42, 0x4b, 0x8b, 0xe0, 0xc9, 0xf5, 0xed, 0x60, 0x41, 0x8c, 0xf4, 0x67, 0xe9, 0x0d, 0x4e, + 0x74, 0xde, 0xa1, 0x7b, 0xb0, 0xdd, 0x32, 0x3a, 0x89, 0x33, 0x59, 0xbc, 0x54, 0x3d, 0xe9, 0xf2, + 0xee, 0x76, 0xe9, 0x26, 0xac, 0xe7, 0x41, 0x81, 0xca, 0x49, 0xd7, 0x27, 0x95, 0xc0, 0xe6, 0x3a, + 0x8a, 0x12, 0x25, 0x5d, 0xdf, 0x0b, 0xb4, 0xdc, 0xc8, 0x38, 0x63, 0xef, 0xd1, 0x0a, 0x6c, 0x15, + 0xd0, 0x5c, 0x9e, 0x6a, 0x78, 0x75, 0x81, 0xcc, 0xa6, 0xad, 0xfd, 0x67, 0x2d, 0x15, 0xd9, 0xa7, + 0x2f, 0xa1, 0x1c, 0x4b, 0x35, 0xb3, 0xfd, 0x41, 0xd8, 0x1d, 0x14, 0xb2, 0xd8, 0x9d, 0xc3, 0xf0, + 0x12, 0xeb, 0x98, 0x4b, 0xec, 0x74, 0x75, 0x8e, 0x42, 0x2f, 0x02, 0xbb, 0x38, 0xb7, 0x2f, 0xc7, + 0xc1, 0x54, 0x8b, 0x3c, 0x33, 0x29, 0x4d, 0x6a, 0xb4, 0x0a, 0x3b, 0x4c, 0x69, 0xd5, 0x8f, 0x74, + 0x62, 0x7d, 0x84, 0xce, 0x48, 0xee, 0x9b, 0xcc, 0xf1, 0x36, 0x79, 0x3d, 0xdb, 0xaa, 0xac, 0x65, + 0x83, 0x91, 0xee, 0xa1, 0x20, 0xf5, 0x30, 0xb5, 0x22, 0x3c, 0x29, 0x65, 0x83, 0x80, 0x82, 0xbc, + 0xa1, 0x00, 0xcb, 0x4d, 0xc6, 0x3b, 0x49, 0x4c, 0xde, 0xce, 0x1c, 0x19, 0x94, 0xed, 0x85, 0x4e, + 0x39, 0x2a, 0x87, 0x26, 0xa7, 0xfe, 0x34, 0x73, 0xe4, 0x7d, 0x38, 0xdf, 0x46, 0x14, 0xe4, 0x5d, + 0x70, 0xdc, 0x42, 0x8a, 0x90, 0x36, 0x92, 0xd6, 0xa2, 0x20, 0xef, 0x33, 0x25, 0x02, 0xa7, 0xa9, + 0x75, 0x27, 0x62, 0xa6, 0x43, 0x4e, 0xe8, 0x0e, 0xd0, 0xfc, 0x85, 0x5d, 0x64, 0xc6, 0xb7, 0xa5, + 0x75, 0xda, 0xf4, 0xc9, 0x07, 0x5a, 0x83, 0x83, 0x42, 0x76, 0x66, 0x78, 0x5b, 0xf6, 0xd0, 0x47, + 0xac, 0xa5, 0xd0, 0x75, 0xa5, 0xea, 0x90, 0x8f, 0xcd, 0xf5, 0xbf, 0xca, 0x8d, 0x5f, 0x7e, 0x9f, + 0x7e, 0xef, 0xbe, 0x2e, 0x67, 0xff, 0x7e, 0xfb, 0x3f, 0x00, 0x00, 0xff, 0xff, 0xbf, 0x2a, 0xb3, + 0x70, 0x96, 0x05, 0x00, 0x00, } diff --git a/protocol/protobuf/application_metadata_message.proto b/protocol/protobuf/application_metadata_message.proto index efaa2dfe9a9..61a8640ded6 100644 --- a/protocol/protobuf/application_metadata_message.proto +++ b/protocol/protobuf/application_metadata_message.proto @@ -55,5 +55,6 @@ message ApplicationMetadataMessage { SYNC_ACTIVITY_CENTER_DISMISSED = 39; SYNC_BOOKMARK = 40; SYNC_CLEAR_HISTORY = 41; + COMMUNITY_ARCHIVE_MAGNETLINK = 42; } } diff --git a/protocol/protobuf/communities.pb.go b/protocol/protobuf/communities.pb.go index 8c1ad094215..f601e938508 100644 --- a/protocol/protobuf/communities.pb.go +++ b/protocol/protobuf/communities.pb.go @@ -639,6 +639,360 @@ func (m *CommunityRequestToJoinResponse) GetGrant() []byte { return nil } +type CommunityMessageArchiveMagnetlink struct { + Clock uint64 `protobuf:"varint,1,opt,name=clock,proto3" json:"clock,omitempty"` + MagnetUri string `protobuf:"bytes,2,opt,name=magnet_uri,json=magnetUri,proto3" json:"magnet_uri,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CommunityMessageArchiveMagnetlink) Reset() { *m = CommunityMessageArchiveMagnetlink{} } +func (m *CommunityMessageArchiveMagnetlink) String() 
string { return proto.CompactTextString(m) } +func (*CommunityMessageArchiveMagnetlink) ProtoMessage() {} +func (*CommunityMessageArchiveMagnetlink) Descriptor() ([]byte, []int) { + return fileDescriptor_f937943d74c1cd8b, []int{9} +} + +func (m *CommunityMessageArchiveMagnetlink) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CommunityMessageArchiveMagnetlink.Unmarshal(m, b) +} +func (m *CommunityMessageArchiveMagnetlink) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CommunityMessageArchiveMagnetlink.Marshal(b, m, deterministic) +} +func (m *CommunityMessageArchiveMagnetlink) XXX_Merge(src proto.Message) { + xxx_messageInfo_CommunityMessageArchiveMagnetlink.Merge(m, src) +} +func (m *CommunityMessageArchiveMagnetlink) XXX_Size() int { + return xxx_messageInfo_CommunityMessageArchiveMagnetlink.Size(m) +} +func (m *CommunityMessageArchiveMagnetlink) XXX_DiscardUnknown() { + xxx_messageInfo_CommunityMessageArchiveMagnetlink.DiscardUnknown(m) +} + +var xxx_messageInfo_CommunityMessageArchiveMagnetlink proto.InternalMessageInfo + +func (m *CommunityMessageArchiveMagnetlink) GetClock() uint64 { + if m != nil { + return m.Clock + } + return 0 +} + +func (m *CommunityMessageArchiveMagnetlink) GetMagnetUri() string { + if m != nil { + return m.MagnetUri + } + return "" +} + +type WakuMessage struct { + Sig []byte `protobuf:"bytes,1,opt,name=sig,proto3" json:"sig,omitempty"` + Timestamp uint64 `protobuf:"varint,2,opt,name=timestamp,proto3" json:"timestamp,omitempty"` + Topic []byte `protobuf:"bytes,3,opt,name=topic,proto3" json:"topic,omitempty"` + Payload []byte `protobuf:"bytes,4,opt,name=payload,proto3" json:"payload,omitempty"` + Padding []byte `protobuf:"bytes,5,opt,name=padding,proto3" json:"padding,omitempty"` + Hash []byte `protobuf:"bytes,6,opt,name=hash,proto3" json:"hash,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WakuMessage) Reset() { *m = WakuMessage{} } +func (m *WakuMessage) String() string { return proto.CompactTextString(m) } +func (*WakuMessage) ProtoMessage() {} +func (*WakuMessage) Descriptor() ([]byte, []int) { + return fileDescriptor_f937943d74c1cd8b, []int{10} +} + +func (m *WakuMessage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WakuMessage.Unmarshal(m, b) +} +func (m *WakuMessage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WakuMessage.Marshal(b, m, deterministic) +} +func (m *WakuMessage) XXX_Merge(src proto.Message) { + xxx_messageInfo_WakuMessage.Merge(m, src) +} +func (m *WakuMessage) XXX_Size() int { + return xxx_messageInfo_WakuMessage.Size(m) +} +func (m *WakuMessage) XXX_DiscardUnknown() { + xxx_messageInfo_WakuMessage.DiscardUnknown(m) +} + +var xxx_messageInfo_WakuMessage proto.InternalMessageInfo + +func (m *WakuMessage) GetSig() []byte { + if m != nil { + return m.Sig + } + return nil +} + +func (m *WakuMessage) GetTimestamp() uint64 { + if m != nil { + return m.Timestamp + } + return 0 +} + +func (m *WakuMessage) GetTopic() []byte { + if m != nil { + return m.Topic + } + return nil +} + +func (m *WakuMessage) GetPayload() []byte { + if m != nil { + return m.Payload + } + return nil +} + +func (m *WakuMessage) GetPadding() []byte { + if m != nil { + return m.Padding + } + return nil +} + +func (m *WakuMessage) GetHash() []byte { + if m != nil { + return m.Hash + } + return nil +} + +type WakuMessageArchiveMetadata struct { + Version uint32 
`protobuf:"varint,1,opt,name=version,proto3" json:"version,omitempty"` + From uint64 `protobuf:"varint,2,opt,name=from,proto3" json:"from,omitempty"` + To uint64 `protobuf:"varint,3,opt,name=to,proto3" json:"to,omitempty"` + ContentTopic [][]byte `protobuf:"bytes,4,rep,name=contentTopic,proto3" json:"contentTopic,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WakuMessageArchiveMetadata) Reset() { *m = WakuMessageArchiveMetadata{} } +func (m *WakuMessageArchiveMetadata) String() string { return proto.CompactTextString(m) } +func (*WakuMessageArchiveMetadata) ProtoMessage() {} +func (*WakuMessageArchiveMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_f937943d74c1cd8b, []int{11} +} + +func (m *WakuMessageArchiveMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WakuMessageArchiveMetadata.Unmarshal(m, b) +} +func (m *WakuMessageArchiveMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WakuMessageArchiveMetadata.Marshal(b, m, deterministic) +} +func (m *WakuMessageArchiveMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_WakuMessageArchiveMetadata.Merge(m, src) +} +func (m *WakuMessageArchiveMetadata) XXX_Size() int { + return xxx_messageInfo_WakuMessageArchiveMetadata.Size(m) +} +func (m *WakuMessageArchiveMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_WakuMessageArchiveMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_WakuMessageArchiveMetadata proto.InternalMessageInfo + +func (m *WakuMessageArchiveMetadata) GetVersion() uint32 { + if m != nil { + return m.Version + } + return 0 +} + +func (m *WakuMessageArchiveMetadata) GetFrom() uint64 { + if m != nil { + return m.From + } + return 0 +} + +func (m *WakuMessageArchiveMetadata) GetTo() uint64 { + if m != nil { + return m.To + } + return 0 +} + +func (m *WakuMessageArchiveMetadata) GetContentTopic() [][]byte { + if m != nil { + return m.ContentTopic + } + return nil +} + +type WakuMessageArchive struct { + Version uint32 `protobuf:"varint,1,opt,name=version,proto3" json:"version,omitempty"` + Metadata *WakuMessageArchiveMetadata `protobuf:"bytes,2,opt,name=metadata,proto3" json:"metadata,omitempty"` + Messages []*WakuMessage `protobuf:"bytes,3,rep,name=messages,proto3" json:"messages,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WakuMessageArchive) Reset() { *m = WakuMessageArchive{} } +func (m *WakuMessageArchive) String() string { return proto.CompactTextString(m) } +func (*WakuMessageArchive) ProtoMessage() {} +func (*WakuMessageArchive) Descriptor() ([]byte, []int) { + return fileDescriptor_f937943d74c1cd8b, []int{12} +} + +func (m *WakuMessageArchive) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WakuMessageArchive.Unmarshal(m, b) +} +func (m *WakuMessageArchive) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WakuMessageArchive.Marshal(b, m, deterministic) +} +func (m *WakuMessageArchive) XXX_Merge(src proto.Message) { + xxx_messageInfo_WakuMessageArchive.Merge(m, src) +} +func (m *WakuMessageArchive) XXX_Size() int { + return xxx_messageInfo_WakuMessageArchive.Size(m) +} +func (m *WakuMessageArchive) XXX_DiscardUnknown() { + xxx_messageInfo_WakuMessageArchive.DiscardUnknown(m) +} + +var xxx_messageInfo_WakuMessageArchive proto.InternalMessageInfo + +func (m *WakuMessageArchive) GetVersion() uint32 { + if m != nil { 
+ return m.Version + } + return 0 +} + +func (m *WakuMessageArchive) GetMetadata() *WakuMessageArchiveMetadata { + if m != nil { + return m.Metadata + } + return nil +} + +func (m *WakuMessageArchive) GetMessages() []*WakuMessage { + if m != nil { + return m.Messages + } + return nil +} + +type WakuMessageArchiveIndexMetadata struct { + Version uint32 `protobuf:"varint,1,opt,name=version,proto3" json:"version,omitempty"` + Metadata *WakuMessageArchiveMetadata `protobuf:"bytes,2,opt,name=metadata,proto3" json:"metadata,omitempty"` + Offset uint64 `protobuf:"varint,3,opt,name=offset,proto3" json:"offset,omitempty"` + Size uint64 `protobuf:"varint,4,opt,name=size,proto3" json:"size,omitempty"` + Padding uint64 `protobuf:"varint,5,opt,name=padding,proto3" json:"padding,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WakuMessageArchiveIndexMetadata) Reset() { *m = WakuMessageArchiveIndexMetadata{} } +func (m *WakuMessageArchiveIndexMetadata) String() string { return proto.CompactTextString(m) } +func (*WakuMessageArchiveIndexMetadata) ProtoMessage() {} +func (*WakuMessageArchiveIndexMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_f937943d74c1cd8b, []int{13} +} + +func (m *WakuMessageArchiveIndexMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WakuMessageArchiveIndexMetadata.Unmarshal(m, b) +} +func (m *WakuMessageArchiveIndexMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WakuMessageArchiveIndexMetadata.Marshal(b, m, deterministic) +} +func (m *WakuMessageArchiveIndexMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_WakuMessageArchiveIndexMetadata.Merge(m, src) +} +func (m *WakuMessageArchiveIndexMetadata) XXX_Size() int { + return xxx_messageInfo_WakuMessageArchiveIndexMetadata.Size(m) +} +func (m *WakuMessageArchiveIndexMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_WakuMessageArchiveIndexMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_WakuMessageArchiveIndexMetadata proto.InternalMessageInfo + +func (m *WakuMessageArchiveIndexMetadata) GetVersion() uint32 { + if m != nil { + return m.Version + } + return 0 +} + +func (m *WakuMessageArchiveIndexMetadata) GetMetadata() *WakuMessageArchiveMetadata { + if m != nil { + return m.Metadata + } + return nil +} + +func (m *WakuMessageArchiveIndexMetadata) GetOffset() uint64 { + if m != nil { + return m.Offset + } + return 0 +} + +func (m *WakuMessageArchiveIndexMetadata) GetSize() uint64 { + if m != nil { + return m.Size + } + return 0 +} + +func (m *WakuMessageArchiveIndexMetadata) GetPadding() uint64 { + if m != nil { + return m.Padding + } + return 0 +} + +type WakuMessageArchiveIndex struct { + Archives map[string]*WakuMessageArchiveIndexMetadata `protobuf:"bytes,1,rep,name=archives,proto3" json:"archives,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WakuMessageArchiveIndex) Reset() { *m = WakuMessageArchiveIndex{} } +func (m *WakuMessageArchiveIndex) String() string { return proto.CompactTextString(m) } +func (*WakuMessageArchiveIndex) ProtoMessage() {} +func (*WakuMessageArchiveIndex) Descriptor() ([]byte, []int) { + return fileDescriptor_f937943d74c1cd8b, []int{14} +} + +func (m *WakuMessageArchiveIndex) XXX_Unmarshal(b []byte) error { + return 
xxx_messageInfo_WakuMessageArchiveIndex.Unmarshal(m, b) +} +func (m *WakuMessageArchiveIndex) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WakuMessageArchiveIndex.Marshal(b, m, deterministic) +} +func (m *WakuMessageArchiveIndex) XXX_Merge(src proto.Message) { + xxx_messageInfo_WakuMessageArchiveIndex.Merge(m, src) +} +func (m *WakuMessageArchiveIndex) XXX_Size() int { + return xxx_messageInfo_WakuMessageArchiveIndex.Size(m) +} +func (m *WakuMessageArchiveIndex) XXX_DiscardUnknown() { + xxx_messageInfo_WakuMessageArchiveIndex.DiscardUnknown(m) +} + +var xxx_messageInfo_WakuMessageArchiveIndex proto.InternalMessageInfo + +func (m *WakuMessageArchiveIndex) GetArchives() map[string]*WakuMessageArchiveIndexMetadata { + if m != nil { + return m.Archives + } + return nil +} + func init() { proto.RegisterEnum("protobuf.CommunityMember_Roles", CommunityMember_Roles_name, CommunityMember_Roles_value) proto.RegisterEnum("protobuf.CommunityPermissions_Access", CommunityPermissions_Access_name, CommunityPermissions_Access_value) @@ -655,64 +1009,90 @@ func init() { proto.RegisterType((*CommunityInvitation)(nil), "protobuf.CommunityInvitation") proto.RegisterType((*CommunityRequestToJoin)(nil), "protobuf.CommunityRequestToJoin") proto.RegisterType((*CommunityRequestToJoinResponse)(nil), "protobuf.CommunityRequestToJoinResponse") + proto.RegisterType((*CommunityMessageArchiveMagnetlink)(nil), "protobuf.CommunityMessageArchiveMagnetlink") + proto.RegisterType((*WakuMessage)(nil), "protobuf.WakuMessage") + proto.RegisterType((*WakuMessageArchiveMetadata)(nil), "protobuf.WakuMessageArchiveMetadata") + proto.RegisterType((*WakuMessageArchive)(nil), "protobuf.WakuMessageArchive") + proto.RegisterType((*WakuMessageArchiveIndexMetadata)(nil), "protobuf.WakuMessageArchiveIndexMetadata") + proto.RegisterType((*WakuMessageArchiveIndex)(nil), "protobuf.WakuMessageArchiveIndex") + proto.RegisterMapType((map[string]*WakuMessageArchiveIndexMetadata)(nil), "protobuf.WakuMessageArchiveIndex.ArchivesEntry") } func init() { proto.RegisterFile("communities.proto", fileDescriptor_f937943d74c1cd8b) } var fileDescriptor_f937943d74c1cd8b = []byte{ - // 859 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x55, 0x6d, 0x8f, 0xdb, 0x44, - 0x10, 0xee, 0x26, 0x71, 0xe2, 0x4c, 0x72, 0x77, 0xbe, 0xbd, 0x6b, 0xeb, 0x5e, 0x45, 0x1b, 0x2c, - 0x90, 0x82, 0x10, 0xa9, 0x48, 0x85, 0x84, 0x78, 0x29, 0xa4, 0x87, 0x55, 0x4c, 0x73, 0x4e, 0xbb, - 0xc9, 0x81, 0xe8, 0x17, 0xcb, 0x71, 0x96, 0xb2, 0x6a, 0x62, 0x1b, 0xaf, 0x73, 0x52, 0x7e, 0x00, - 0x12, 0x3f, 0x01, 0x89, 0xef, 0xfc, 0x27, 0xbe, 0xf1, 0x53, 0xd0, 0xee, 0xc6, 0x2f, 0xc9, 0x25, - 0x6d, 0x25, 0xd4, 0x4f, 0xf1, 0xec, 0xee, 0x3c, 0xf3, 0xcc, 0x33, 0x93, 0x19, 0x38, 0x0e, 0xa2, - 0xc5, 0x62, 0x19, 0xb2, 0x94, 0x51, 0xde, 0x8b, 0x93, 0x28, 0x8d, 0xb0, 0x2e, 0x7f, 0xa6, 0xcb, - 0x5f, 0xce, 0x4e, 0x82, 0x5f, 0xfd, 0xd4, 0x63, 0x33, 0x1a, 0xa6, 0x2c, 0x5d, 0xa9, 0x6b, 0xeb, - 0x0a, 0xb4, 0x27, 0x89, 0x1f, 0xa6, 0xf8, 0x7d, 0x68, 0x67, 0xce, 0x2b, 0x8f, 0xcd, 0x4c, 0xd4, - 0x41, 0xdd, 0x36, 0x69, 0xe5, 0x67, 0xce, 0x0c, 0xdf, 0x85, 0xe6, 0x82, 0x2e, 0xa6, 0x34, 0x11, - 0xf7, 0x15, 0x79, 0xaf, 0xab, 0x03, 0x67, 0x86, 0x6f, 0x43, 0x63, 0x8d, 0x6f, 0x56, 0x3b, 0xa8, - 0xdb, 0x24, 0x75, 0x61, 0x3a, 0x33, 0x7c, 0x0a, 0x5a, 0x30, 0x8f, 0x82, 0x57, 0x66, 0xad, 0x83, - 0xba, 0x35, 0xa2, 0x0c, 0xeb, 0x0f, 0x04, 0x47, 0xe7, 0x19, 0xf6, 0x85, 0x04, 0xc1, 0x9f, 0x81, - 0x96, 0x44, 0x73, 0xca, 0x4d, 0xd4, 0xa9, 
0x76, 0x0f, 0xfb, 0xf7, 0x7b, 0x19, 0xf5, 0xde, 0xd6, - 0xcb, 0x1e, 0x11, 0xcf, 0x88, 0x7a, 0x6d, 0x3d, 0x02, 0x4d, 0xda, 0xd8, 0x80, 0xf6, 0xa5, 0xfb, - 0xd4, 0x1d, 0xfd, 0xe4, 0x7a, 0x64, 0x34, 0xb4, 0x8d, 0x1b, 0xb8, 0x0d, 0xba, 0xf8, 0xf2, 0x06, - 0xc3, 0xa1, 0x81, 0xf0, 0x4d, 0x38, 0x96, 0xd6, 0xc5, 0xc0, 0x1d, 0x3c, 0xb1, 0xbd, 0xcb, 0xb1, - 0x4d, 0xc6, 0x46, 0xc5, 0xfa, 0x17, 0xc1, 0x69, 0x1e, 0xe0, 0x19, 0x4d, 0x16, 0x8c, 0x73, 0x16, - 0x85, 0x1c, 0xdf, 0x01, 0x9d, 0x86, 0xdc, 0x8b, 0xc2, 0xf9, 0x4a, 0xca, 0xa1, 0x93, 0x06, 0x0d, - 0xf9, 0x28, 0x9c, 0xaf, 0xb0, 0x09, 0x8d, 0x38, 0x61, 0x57, 0x7e, 0x4a, 0xa5, 0x10, 0x3a, 0xc9, - 0x4c, 0xfc, 0x35, 0xd4, 0xfd, 0x20, 0xa0, 0x9c, 0x4b, 0x19, 0x0e, 0xfb, 0x1f, 0xee, 0xc8, 0xa2, - 0x14, 0xa4, 0x37, 0x90, 0x8f, 0xc9, 0xda, 0xc9, 0x9a, 0x40, 0x5d, 0x9d, 0x60, 0x0c, 0x87, 0x59, - 0x36, 0x83, 0xf3, 0x73, 0x7b, 0x3c, 0x36, 0x6e, 0xe0, 0x63, 0x38, 0x70, 0x47, 0xde, 0x85, 0x7d, - 0xf1, 0xd8, 0x26, 0xe3, 0xef, 0x9d, 0x67, 0x06, 0xc2, 0x27, 0x70, 0xe4, 0xb8, 0x3f, 0x3a, 0x93, - 0xc1, 0xc4, 0x19, 0xb9, 0xde, 0xc8, 0x1d, 0xfe, 0x6c, 0x54, 0xf0, 0x21, 0xc0, 0xc8, 0xf5, 0x88, - 0xfd, 0xfc, 0xd2, 0x1e, 0x4f, 0x8c, 0xaa, 0xf5, 0x97, 0x56, 0x4a, 0xf1, 0x3b, 0xca, 0x83, 0x84, - 0xc5, 0x29, 0x8b, 0xc2, 0xa2, 0x38, 0xa8, 0x54, 0x1c, 0x6c, 0x43, 0x43, 0xd5, 0x95, 0x9b, 0x95, - 0x4e, 0xb5, 0xdb, 0xea, 0x7f, 0xbc, 0x23, 0x89, 0x12, 0x4c, 0x4f, 0x95, 0x85, 0xdb, 0x61, 0x9a, - 0xac, 0x48, 0xe6, 0x8b, 0xbf, 0x85, 0x56, 0x5c, 0x64, 0x2a, 0xf5, 0x68, 0xf5, 0xef, 0xbd, 0x5e, - 0x0f, 0x52, 0x76, 0xc1, 0x7d, 0xd0, 0xb3, 0x7e, 0x35, 0x35, 0xe9, 0x7e, 0xab, 0xe4, 0x2e, 0xfb, - 0x4b, 0xdd, 0x92, 0xfc, 0x1d, 0xfe, 0x06, 0x34, 0xd1, 0x79, 0xdc, 0xac, 0x4b, 0xea, 0x1f, 0xbd, - 0x81, 0xba, 0x40, 0x59, 0x13, 0x57, 0x7e, 0xa2, 0xec, 0x53, 0x3f, 0xf4, 0xe6, 0x8c, 0xa7, 0x66, - 0xa3, 0x53, 0xed, 0x36, 0x49, 0x63, 0xea, 0x87, 0x43, 0xc6, 0x53, 0xec, 0x02, 0x04, 0x7e, 0x4a, - 0x5f, 0x46, 0x09, 0xa3, 0xdc, 0xd4, 0x65, 0x80, 0xde, 0x9b, 0x02, 0xe4, 0x0e, 0x2a, 0x4a, 0x09, - 0xe1, 0xec, 0x12, 0xda, 0x65, 0xe9, 0xb0, 0x01, 0xd5, 0x57, 0x54, 0x35, 0x5b, 0x93, 0x88, 0x4f, - 0xfc, 0x00, 0xb4, 0x2b, 0x7f, 0xbe, 0x54, 0x6d, 0xd6, 0xea, 0xdf, 0xd9, 0xfb, 0x9f, 0x20, 0xea, - 0xdd, 0x17, 0x95, 0xcf, 0xd1, 0xd9, 0x73, 0x80, 0x22, 0xad, 0x1d, 0xa0, 0x9f, 0x6c, 0x82, 0xde, - 0xde, 0x01, 0x2a, 0xfc, 0xcb, 0x90, 0x2f, 0xe0, 0x68, 0x2b, 0x91, 0x1d, 0xb8, 0x9f, 0x6e, 0xe2, - 0xde, 0xdd, 0x85, 0xab, 0x40, 0x56, 0x25, 0x6c, 0xeb, 0x9f, 0x0a, 0x1c, 0x6c, 0x04, 0xc6, 0x8f, - 0x8a, 0x06, 0x44, 0x52, 0xe4, 0x0f, 0xf6, 0x50, 0x7c, 0xbb, 0xce, 0xab, 0xfc, 0xbf, 0xce, 0xab, - 0xbe, 0x65, 0xe7, 0xdd, 0x87, 0xd6, 0xba, 0xb6, 0x72, 0x82, 0xd6, 0xa4, 0x30, 0x59, 0xb9, 0xc5, - 0x00, 0x3d, 0x03, 0x3d, 0x8e, 0x38, 0x13, 0x6d, 0x21, 0xdb, 0x59, 0x23, 0xb9, 0xfd, 0x8e, 0x5a, - 0xc1, 0x9a, 0xc1, 0xf1, 0x35, 0xed, 0xb7, 0x89, 0xa2, 0x6b, 0x44, 0x31, 0xd4, 0x42, 0x7f, 0xa1, - 0x22, 0x35, 0x89, 0xfc, 0xde, 0x20, 0x5f, 0xdd, 0x24, 0x6f, 0xfd, 0x89, 0xe0, 0x24, 0x0f, 0xe3, - 0x84, 0x57, 0x2c, 0xf5, 0xe5, 0x78, 0x79, 0x08, 0x37, 0x8b, 0xa5, 0x32, 0x2b, 0xfe, 0x14, 0xeb, - 0xed, 0x72, 0x1a, 0xec, 0x99, 0x49, 0x2f, 0xc5, 0x4a, 0x5a, 0xaf, 0x18, 0x65, 0xec, 0xdf, 0x2f, - 0xef, 0x01, 0xc4, 0xcb, 0xe9, 0x9c, 0x05, 0x9e, 0xd0, 0xab, 0x26, 0x7d, 0x9a, 0xea, 0xe4, 0x29, - 0x5d, 0x59, 0xbf, 0x23, 0xb8, 0x95, 0x53, 0x23, 0xf4, 0xb7, 0x25, 0xe5, 0xe9, 0x24, 0xfa, 0x21, - 0x62, 0xfb, 0x86, 0xdf, 0x7a, 0xea, 0x97, 0xf2, 0x17, 0x53, 0xdf, 0x15, 0x12, 0xec, 0xe5, 0xb0, - 0xbd, 0x3c, 0x6b, 0xd7, 0x96, 0xa7, 0xf5, 0x37, 0x82, 0x7b, 0xbb, 
0x79, 0x10, 0xca, 0xe3, 0x28, - 0xe4, 0x74, 0x0f, 0x9f, 0xaf, 0xa0, 0x99, 0xe3, 0xbc, 0xa6, 0x93, 0x4b, 0x0a, 0x92, 0xc2, 0x41, - 0x54, 0x4d, 0x6c, 0x96, 0x38, 0xa5, 0x8a, 0xb3, 0x4e, 0x72, 0xbb, 0x10, 0xba, 0x56, 0x12, 0xfa, - 0xf1, 0xc1, 0x8b, 0x56, 0xef, 0xc1, 0x97, 0x59, 0x80, 0x69, 0x5d, 0x7e, 0x3d, 0xfc, 0x2f, 0x00, - 0x00, 0xff, 0xff, 0x49, 0xd3, 0x7e, 0x3e, 0x5b, 0x08, 0x00, 0x00, + // 1163 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x56, 0xdd, 0x6e, 0x1a, 0x47, + 0x14, 0xce, 0xf2, 0xbb, 0x1c, 0xb0, 0x8d, 0xc7, 0x7f, 0x84, 0x34, 0x31, 0x59, 0xb5, 0x12, 0x51, + 0x55, 0xac, 0x10, 0x55, 0xaa, 0xfa, 0x93, 0x84, 0xb8, 0x28, 0xa5, 0xb6, 0x21, 0x19, 0x70, 0xd3, + 0xe6, 0x66, 0xb5, 0x2c, 0x63, 0x3c, 0x32, 0xbb, 0x4b, 0x77, 0x06, 0x54, 0x7a, 0xd1, 0xbb, 0x4a, + 0x7d, 0x84, 0x4a, 0xb9, 0xac, 0xd4, 0x17, 0xe9, 0x7d, 0xef, 0x7b, 0xd7, 0x47, 0xa9, 0x66, 0x66, + 0x77, 0x59, 0x6c, 0xb0, 0x2d, 0x45, 0xbd, 0x62, 0xce, 0xcc, 0x9c, 0x6f, 0xbe, 0x73, 0xce, 0xb7, + 0xe7, 0x00, 0x9b, 0xb6, 0xe7, 0x38, 0x13, 0x97, 0x72, 0x4a, 0x58, 0x6d, 0xec, 0x7b, 0xdc, 0x43, + 0xba, 0xfc, 0xe9, 0x4f, 0xce, 0xca, 0x5b, 0xf6, 0xb9, 0xc5, 0x4d, 0x3a, 0x20, 0x2e, 0xa7, 0x7c, + 0xa6, 0x8e, 0x8d, 0x29, 0xa4, 0x5f, 0xfa, 0x96, 0xcb, 0xd1, 0x43, 0x28, 0x84, 0xce, 0x33, 0x93, + 0x0e, 0x4a, 0x5a, 0x45, 0xab, 0x16, 0x70, 0x3e, 0xda, 0x6b, 0x0d, 0xd0, 0x3d, 0xc8, 0x39, 0xc4, + 0xe9, 0x13, 0x5f, 0x9c, 0x27, 0xe4, 0xb9, 0xae, 0x36, 0x5a, 0x03, 0xb4, 0x07, 0xd9, 0x00, 0xbf, + 0x94, 0xac, 0x68, 0xd5, 0x1c, 0xce, 0x08, 0xb3, 0x35, 0x40, 0xdb, 0x90, 0xb6, 0x47, 0x9e, 0x7d, + 0x51, 0x4a, 0x55, 0xb4, 0x6a, 0x0a, 0x2b, 0xc3, 0xf8, 0x4d, 0x83, 0x8d, 0xc3, 0x10, 0xfb, 0x44, + 0x82, 0xa0, 0x4f, 0x21, 0xed, 0x7b, 0x23, 0xc2, 0x4a, 0x5a, 0x25, 0x59, 0x5d, 0xaf, 0xef, 0xd7, + 0x42, 0xea, 0xb5, 0x4b, 0x37, 0x6b, 0x58, 0x5c, 0xc3, 0xea, 0xb6, 0xf1, 0x14, 0xd2, 0xd2, 0x46, + 0x45, 0x28, 0x9c, 0xb6, 0x8f, 0xda, 0x9d, 0x37, 0x6d, 0x13, 0x77, 0x8e, 0x9b, 0xc5, 0x3b, 0xa8, + 0x00, 0xba, 0x58, 0x99, 0x8d, 0xe3, 0xe3, 0xa2, 0x86, 0x76, 0x60, 0x53, 0x5a, 0x27, 0x8d, 0x76, + 0xe3, 0x65, 0xd3, 0x3c, 0xed, 0x36, 0x71, 0xb7, 0x98, 0x30, 0xfe, 0xd5, 0x60, 0x3b, 0x7a, 0xe0, + 0x15, 0xf1, 0x1d, 0xca, 0x18, 0xf5, 0x5c, 0x86, 0xee, 0x82, 0x4e, 0x5c, 0x66, 0x7a, 0xee, 0x68, + 0x26, 0xd3, 0xa1, 0xe3, 0x2c, 0x71, 0x59, 0xc7, 0x1d, 0xcd, 0x50, 0x09, 0xb2, 0x63, 0x9f, 0x4e, + 0x2d, 0x4e, 0x64, 0x22, 0x74, 0x1c, 0x9a, 0xe8, 0x2b, 0xc8, 0x58, 0xb6, 0x4d, 0x18, 0x93, 0x69, + 0x58, 0xaf, 0x7f, 0xb4, 0x24, 0x8a, 0xd8, 0x23, 0xb5, 0x86, 0xbc, 0x8c, 0x03, 0x27, 0xa3, 0x07, + 0x19, 0xb5, 0x83, 0x10, 0xac, 0x87, 0xd1, 0x34, 0x0e, 0x0f, 0x9b, 0xdd, 0x6e, 0xf1, 0x0e, 0xda, + 0x84, 0xb5, 0x76, 0xc7, 0x3c, 0x69, 0x9e, 0xbc, 0x68, 0xe2, 0xee, 0x37, 0xad, 0x57, 0x45, 0x0d, + 0x6d, 0xc1, 0x46, 0xab, 0xfd, 0x5d, 0xab, 0xd7, 0xe8, 0xb5, 0x3a, 0x6d, 0xb3, 0xd3, 0x3e, 0xfe, + 0xa1, 0x98, 0x40, 0xeb, 0x00, 0x9d, 0xb6, 0x89, 0x9b, 0xaf, 0x4f, 0x9b, 0xdd, 0x5e, 0x31, 0x69, + 0xbc, 0x4b, 0xc7, 0x42, 0xfc, 0x9a, 0x30, 0xdb, 0xa7, 0x63, 0x4e, 0x3d, 0x77, 0x5e, 0x1c, 0x2d, + 0x56, 0x1c, 0xd4, 0x84, 0xac, 0xaa, 0x2b, 0x2b, 0x25, 0x2a, 0xc9, 0x6a, 0xbe, 0xfe, 0xf1, 0x92, + 0x20, 0x62, 0x30, 0x35, 0x55, 0x16, 0xd6, 0x74, 0xb9, 0x3f, 0xc3, 0xa1, 0x2f, 0x7a, 0x0e, 0xf9, + 0xf1, 0x3c, 0x52, 0x99, 0x8f, 0x7c, 0xfd, 0xc1, 0xf5, 0xf9, 0xc0, 0x71, 0x17, 0x54, 0x07, 0x3d, + 0xd4, 0x6b, 0x29, 0x2d, 0xdd, 0x77, 0x63, 0xee, 0x52, 0x5f, 0xea, 0x14, 0x47, 0xf7, 0xd0, 0x33, + 0x48, 0x0b, 0xe5, 0xb1, 0x52, 0x46, 0x52, 0x7f, 0x74, 0x03, 0x75, 0x81, 
0x12, 0x10, 0x57, 0x7e, + 0xa2, 0xec, 0x7d, 0xcb, 0x35, 0x47, 0x94, 0xf1, 0x52, 0xb6, 0x92, 0xac, 0xe6, 0x70, 0xb6, 0x6f, + 0xb9, 0xc7, 0x94, 0x71, 0xd4, 0x06, 0xb0, 0x2d, 0x4e, 0x86, 0x9e, 0x4f, 0x09, 0x2b, 0xe9, 0xf2, + 0x81, 0xda, 0x4d, 0x0f, 0x44, 0x0e, 0xea, 0x95, 0x18, 0x42, 0xf9, 0x14, 0x0a, 0xf1, 0xd4, 0xa1, + 0x22, 0x24, 0x2f, 0x88, 0x12, 0x5b, 0x0e, 0x8b, 0x25, 0x3a, 0x80, 0xf4, 0xd4, 0x1a, 0x4d, 0x94, + 0xcc, 0xf2, 0xf5, 0xbb, 0x2b, 0xbf, 0x09, 0xac, 0xee, 0x7d, 0x9e, 0xf8, 0x4c, 0x2b, 0xbf, 0x06, + 0x98, 0x87, 0xb5, 0x04, 0xf4, 0x93, 0x45, 0xd0, 0xbd, 0x25, 0xa0, 0xc2, 0x3f, 0x0e, 0xf9, 0x16, + 0x36, 0x2e, 0x05, 0xb2, 0x04, 0xf7, 0xf1, 0x22, 0xee, 0xbd, 0x65, 0xb8, 0x0a, 0x64, 0x16, 0xc3, + 0x36, 0xfe, 0x49, 0xc0, 0xda, 0xc2, 0xc3, 0xe8, 0xe9, 0x5c, 0x80, 0x9a, 0x4c, 0xf2, 0x87, 0x2b, + 0x28, 0xde, 0x4e, 0x79, 0x89, 0xf7, 0x53, 0x5e, 0xf2, 0x96, 0xca, 0xdb, 0x87, 0x7c, 0x50, 0x5b, + 0xd9, 0x41, 0x53, 0x32, 0x31, 0x61, 0xb9, 0x45, 0x03, 0x2d, 0x83, 0x3e, 0xf6, 0x18, 0x15, 0xb2, + 0x90, 0x72, 0x4e, 0xe3, 0xc8, 0xfe, 0x9f, 0xa4, 0x60, 0x0c, 0x60, 0xf3, 0x4a, 0xee, 0x2f, 0x13, + 0xd5, 0xae, 0x10, 0x45, 0x90, 0x72, 0x2d, 0x47, 0xbd, 0x94, 0xc3, 0x72, 0xbd, 0x40, 0x3e, 0xb9, + 0x48, 0xde, 0xf8, 0x5d, 0x83, 0xad, 0xe8, 0x99, 0x96, 0x3b, 0xa5, 0xdc, 0x92, 0xed, 0xe5, 0x09, + 0xec, 0xcc, 0x87, 0xca, 0x60, 0xfe, 0x51, 0x04, 0xd3, 0x65, 0xdb, 0x5e, 0xd1, 0x93, 0x86, 0x62, + 0x24, 0x05, 0x23, 0x46, 0x19, 0xab, 0xe7, 0xcb, 0x7d, 0x80, 0xf1, 0xa4, 0x3f, 0xa2, 0xb6, 0x29, + 0xf2, 0x95, 0x92, 0x3e, 0x39, 0xb5, 0x73, 0x44, 0x66, 0xc6, 0xaf, 0x1a, 0xec, 0x46, 0xd4, 0x30, + 0xf9, 0x71, 0x42, 0x18, 0xef, 0x79, 0xdf, 0x7a, 0x74, 0x55, 0xf3, 0x0b, 0xba, 0x7e, 0x2c, 0x7e, + 0xd1, 0xf5, 0xdb, 0x22, 0x05, 0x2b, 0x39, 0x5c, 0x1e, 0x9e, 0xa9, 0x2b, 0xc3, 0xd3, 0xf8, 0x53, + 0x83, 0x07, 0xcb, 0x79, 0x60, 0xc2, 0xc6, 0x9e, 0xcb, 0xc8, 0x0a, 0x3e, 0x5f, 0x42, 0x2e, 0xc2, + 0xb9, 0x46, 0xc9, 0xb1, 0x0c, 0xe2, 0xb9, 0x83, 0xa8, 0x9a, 0x98, 0x2c, 0x63, 0x4e, 0x14, 0x67, + 0x1d, 0x47, 0xf6, 0x3c, 0xd1, 0xa9, 0x58, 0xa2, 0x8d, 0xef, 0xe1, 0x61, 0x4c, 0x4f, 0x8c, 0x59, + 0x43, 0xd2, 0xf0, 0xed, 0x73, 0x3a, 0x25, 0x27, 0xd6, 0xd0, 0x25, 0x7c, 0x44, 0xdd, 0x8b, 0x15, + 0x54, 0xef, 0x03, 0x38, 0xf2, 0x8e, 0x39, 0xf1, 0x69, 0x90, 0xbc, 0x9c, 0xda, 0x39, 0xf5, 0xa9, + 0xf1, 0x4e, 0x83, 0xfc, 0x1b, 0xeb, 0x62, 0x12, 0xa0, 0x0a, 0x89, 0x33, 0x3a, 0x0c, 0xb4, 0x20, + 0x96, 0xe8, 0x03, 0xc8, 0x71, 0xea, 0x10, 0xc6, 0x2d, 0x67, 0x2c, 0xfd, 0x53, 0x78, 0xbe, 0x21, + 0x1e, 0xe5, 0xde, 0x98, 0xda, 0x32, 0x90, 0x02, 0x56, 0x86, 0x1c, 0xc5, 0xd6, 0x6c, 0xe4, 0x59, + 0x61, 0xda, 0x43, 0x53, 0x9d, 0x0c, 0x06, 0xd4, 0x1d, 0xca, 0xaf, 0x4d, 0x9e, 0x48, 0x53, 0xe8, + 0xfb, 0xdc, 0x62, 0xe7, 0xa5, 0x8c, 0xdc, 0x96, 0x6b, 0xe3, 0x17, 0x28, 0xc7, 0xc8, 0x85, 0x21, + 0x13, 0x6e, 0x0d, 0x2c, 0x6e, 0x09, 0xac, 0x29, 0xf1, 0x59, 0xa8, 0xdd, 0x35, 0x1c, 0x9a, 0x02, + 0xeb, 0xcc, 0xf7, 0x9c, 0x80, 0xae, 0x5c, 0xa3, 0x75, 0x48, 0x70, 0x4f, 0xd2, 0x4c, 0xe1, 0x04, + 0xf7, 0x90, 0x21, 0xf4, 0xe1, 0x72, 0xe2, 0xf2, 0x9e, 0x0c, 0x20, 0x55, 0x49, 0x56, 0x0b, 0x78, + 0x61, 0xcf, 0xf8, 0x43, 0x03, 0x74, 0x95, 0xc0, 0x35, 0x0f, 0x3f, 0x07, 0xdd, 0x09, 0xe8, 0x05, + 0xba, 0x88, 0x75, 0xc9, 0xd5, 0xa1, 0xe0, 0xc8, 0x0b, 0x3d, 0x16, 0x08, 0xf2, 0x8e, 0x98, 0xce, + 0xa2, 0xcf, 0xee, 0x2c, 0x45, 0xc0, 0xd1, 0x35, 0xe3, 0x2f, 0x0d, 0xf6, 0xaf, 0x62, 0xb7, 0xdc, + 0x01, 0xf9, 0xe9, 0x16, 0xb9, 0x7a, 0x7f, 0xca, 0xbb, 0x90, 0xf1, 0xce, 0xce, 0x18, 0xe1, 0x41, + 0x76, 0x03, 0x4b, 0x54, 0x81, 0xd1, 0x9f, 0x49, 0xf0, 0x27, 0x53, 0xae, 0x2f, 0xd7, 0x3f, 0x15, + 
0xd5, 0xdf, 0xf8, 0x5b, 0x83, 0xbd, 0x15, 0x51, 0xa0, 0x23, 0xd0, 0x2d, 0x65, 0x87, 0xc3, 0xe7,
+	0xe0, 0x3a, 0x8e, 0xd2, 0xa9, 0x16, 0x18, 0xc1, 0x1c, 0x8a, 0x00, 0xca, 0x67, 0xb0, 0xb6, 0x70,
+	0xb4, 0xa4, 0xad, 0x3f, 0x5b, 0x6c, 0xeb, 0x8f, 0x6e, 0x7c, 0x2c, 0xca, 0xca, 0xbc, 0xcd, 0xbf,
+	0x58, 0x7b, 0x9b, 0xaf, 0x1d, 0x7c, 0x11, 0x7a, 0xf6, 0x33, 0x72, 0xf5, 0xe4, 0xbf, 0x00, 0x00,
+	0x00, 0xff, 0xff, 0x98, 0x96, 0xcd, 0x91, 0x10, 0x0c, 0x00, 0x00,
 }
diff --git a/protocol/protobuf/communities.proto b/protocol/protobuf/communities.proto
index bc5a7c6db4c..39361e863f6 100644
--- a/protocol/protobuf/communities.proto
+++ b/protocol/protobuf/communities.proto
@@ -79,3 +79,42 @@ message CommunityRequestToJoinResponse {
   bool accepted = 3;
   bytes grant = 4;
 }
+
+message CommunityMessageArchiveMagnetlink {
+  uint64 clock = 1;
+  string magnet_uri = 2;
+}
+
+message WakuMessage {
+  bytes sig = 1;
+  uint64 timestamp = 2;
+  bytes topic = 3;
+  bytes payload = 4;
+  bytes padding = 5;
+  bytes hash = 6;
+}
+
+message WakuMessageArchiveMetadata {
+  uint32 version = 1;
+  uint64 from = 2;
+  uint64 to = 3;
+  repeated bytes contentTopic = 4;
+}
+
+message WakuMessageArchive {
+  uint32 version = 1;
+  WakuMessageArchiveMetadata metadata = 2;
+  repeated WakuMessage messages = 3;
+}
+
+message WakuMessageArchiveIndexMetadata {
+  uint32 version = 1;
+  WakuMessageArchiveMetadata metadata = 2;
+  uint64 offset = 3;
+  uint64 size = 4;
+  uint64 padding = 5;
+}
+
+message WakuMessageArchiveIndex {
+  map<string, WakuMessageArchiveIndexMetadata> archives = 1;
+}
diff --git a/services/ext/api.go b/services/ext/api.go
index 53524627103..798c2cbc412 100644
--- a/services/ext/api.go
+++ b/services/ext/api.go
@@ -928,6 +928,14 @@ func (api *PublicAPI) GetCommunitiesSettings() ([]communities.CommunitySettings,
 	return api.service.messenger.GetCommunitiesSettings()
 }
 
+func (api *PublicAPI) EnableCommunityHistoryArchiveProtocol() error {
+	return api.service.messenger.EnableCommunityHistoryArchiveProtocol()
+}
+
+func (api *PublicAPI) DisableCommunityHistoryArchiveProtocol() error {
+	return api.service.messenger.DisableCommunityHistoryArchiveProtocol()
+}
+
 func (api *PublicAPI) AddStorePeer(address string) (string, error) {
 	return api.service.messenger.AddStorePeer(address)
 }
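
For reviewers, a minimal sketch of how the WakuMessageArchiveIndex introduced above can be consumed once a history archive torrent has been downloaded. It assumes the generated protobuf package from this change (github.com/status-im/status-go/protocol/protobuf) and the golang/protobuf runtime already imported in the patch; the file name "index" and the readArchiveIndex helper are illustrative assumptions, not APIs added by this patch.

package main

import (
	"fmt"
	"io/ioutil"

	"github.com/golang/protobuf/proto"

	"github.com/status-im/status-go/protocol/protobuf"
)

// readArchiveIndex decodes a serialized WakuMessageArchiveIndex, i.e. the
// kind of index payload this patch writes into the history archive torrent.
// The helper itself is hypothetical and only exists for this sketch.
func readArchiveIndex(path string) (*protobuf.WakuMessageArchiveIndex, error) {
	data, err := ioutil.ReadFile(path)
	if err != nil {
		return nil, err
	}
	index := &protobuf.WakuMessageArchiveIndex{}
	if err := proto.Unmarshal(data, index); err != nil {
		return nil, err
	}
	return index, nil
}

func main() {
	// "index" is an assumed file name for the downloaded index payload.
	index, err := readArchiveIndex("index")
	if err != nil {
		panic(err)
	}
	// Each entry describes one archive: Offset and Size locate its bytes in
	// the archive data, Padding is the trailing padding length, and the
	// embedded WakuMessageArchiveMetadata carries the covered time range.
	for key, md := range index.GetArchives() {
		fmt.Printf("archive %s: offset=%d size=%d padding=%d from=%d to=%d\n",
			key, md.GetOffset(), md.GetSize(), md.GetPadding(),
			md.GetMetadata().GetFrom(), md.GetMetadata().GetTo())
	}
}

Since EnableCommunityHistoryArchiveProtocol and DisableCommunityHistoryArchiveProtocol are added as plain PublicAPI methods on services/ext, clients are expected to toggle the feature through that service's regular RPC surface; this patch introduces no new transport for them.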