define a nostr.Kind type for event kinds, adjust usages everywhere, and fix some horrible bugs in mmm, lmdb and badger querying and deleting.

fiatjaf
2025-04-20 11:14:39 -03:00
parent 27f40c2cf2
commit 15c6093c9b
74 changed files with 689 additions and 404 deletions

View File

@@ -44,7 +44,7 @@ func (c *Connection) WriteMessage(ctx context.Context, data []byte) error {
func (c *Connection) ReadMessage(ctx context.Context, buf io.Writer) error {
_, reader, err := c.conn.Reader(ctx)
if err != nil {
return fmt.Errorf("failed to get reader: %w", err)
return fmt.Errorf("conn reader: %w", err)
}
if _, err := io.Copy(buf, reader); err != nil {
return fmt.Errorf("failed to read message: %w", err)

View File

@@ -14,7 +14,7 @@ func TestCount(t *testing.T) {
defer rl.Close()
count, _, err := rl.Count(context.Background(), Filter{
Kinds: []uint16{KindFollowList}, Tags: TagMap{"p": []string{"3bf0c63fcb93463407af97a5e5ee64fa883d107ef9e558472c4eb9aaaefa459d"}},
Kinds: []Kind{KindFollowList}, Tags: TagMap{"p": []string{"3bf0c63fcb93463407af97a5e5ee64fa883d107ef9e558472c4eb9aaaefa459d"}},
}, SubscriptionOptions{})
assert.NoError(t, err)
assert.Greater(t, count, uint32(0))

View File

@@ -499,7 +499,7 @@ type sonicMessageParser struct {
reusableIDArray []ID
reusablePubKeyArray []PubKey
reusableStringArray []string
reusableUint16Array []uint16
reusableUint16Array []Kind
}
// NewMessageParser returns a sonicMessageParser object that is intended to be reused many times.
@@ -510,7 +510,7 @@ func NewMessageParser() sonicMessageParser {
reusableStringArray: make([]string, 0, 10000),
reusableIDArray: make([]ID, 0, 10000),
reusablePubKeyArray: make([]PubKey, 0, 10000),
reusableUint16Array: make([]uint16, 0, 10000),
reusableUint16Array: make([]Kind, 0, 10000),
}
}
@@ -538,14 +538,14 @@ func (smp *sonicMessageParser) doneWithIDSlice(slice []string) {
}
}
func (smp *sonicMessageParser) doneWithUint16Slice(slice []uint16) {
func (smp *sonicMessageParser) doneWithUint16Slice(slice []Kind) {
if unsafe.SliceData(smp.reusableUint16Array) == unsafe.SliceData(slice) {
smp.reusableUint16Array = slice[len(slice):]
}
if cap(smp.reusableUint16Array) < 8 {
// create a new one
smp.reusableUint16Array = make([]uint16, 0, 10000)
smp.reusableUint16Array = make([]Kind, 0, 10000)
}
}
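
The reusable-slice trick above is easy to miss in diff form: parsed []Kind slices share one large backing array, and doneWithUint16Slice only advances the pool past whatever the caller actually consumed, reallocating once capacity runs low. A minimal standalone sketch of the same pattern, assuming the Kind constants this commit introduces; kindPool, borrow and release are illustrative names, not part of the parser's API:

package main

import (
	"unsafe"

	"fiatjaf.com/nostr"
)

// kindPool hands out zero-length slices that append into one shared backing array.
type kindPool struct {
	backing []nostr.Kind
}

func newKindPool() *kindPool {
	return &kindPool{backing: make([]nostr.Kind, 0, 10000)}
}

// borrow returns the zero-length head of the pool; the caller appends parsed kinds into it.
func (p *kindPool) borrow() []nostr.Kind { return p.backing }

// release advances the pool past the portion the caller filled, but only if the slice
// still shares the pool's backing array (i.e. append never had to reallocate it), and
// replenishes the pool once it runs low.
func (p *kindPool) release(s []nostr.Kind) {
	if unsafe.SliceData(p.backing) == unsafe.SliceData(s) {
		p.backing = s[len(s):]
	}
	if cap(p.backing) < 8 {
		p.backing = make([]nostr.Kind, 0, 10000)
	}
}

func main() {
	p := newKindPool()
	kinds := append(p.borrow(), nostr.KindTextNote, nostr.KindReaction) // filled while parsing
	p.release(kinds)                                                    // next borrow starts on fresh memory
}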

View File

@@ -99,7 +99,7 @@ func TestParseMessage(t *testing.T) {
{
Name: "REQ envelope",
Message: `["REQ","million", {"kinds": [1]}, {"kinds": [30023 ], "#d": ["buteko", "batuke"]}]`,
ExpectedEnvelope: &ReqEnvelope{SubscriptionID: "million", Filters: Filters{{Kinds: []uint16{1}}, {Kinds: []uint16{30023}, Tags: TagMap{"d": []string{"buteko", "batuke"}}}}},
ExpectedEnvelope: &ReqEnvelope{SubscriptionID: "million", Filters: Filters{{Kinds: []Kind{1}}, {Kinds: []Kind{30023}, Tags: TagMap{"d": []string{"buteko", "batuke"}}}}},
},
{
Name: "CLOSE envelope",

View File

@@ -13,7 +13,7 @@ func TestEOSEMadness(t *testing.T) {
defer rl.Close()
sub, err := rl.Subscribe(context.Background(), Filter{
Kinds: []uint16{KindTextNote}, Limit: 2,
Kinds: []Kind{KindTextNote}, Limit: 2,
}, SubscriptionOptions{})
assert.NoError(t, err)

View File

@@ -13,7 +13,7 @@ type Event struct {
ID ID
PubKey PubKey
CreatedAt Timestamp
Kind uint16
Kind Kind
Tags Tags
Content string
Sig [64]byte

View File

@@ -41,7 +41,7 @@ func easyjsonF642ad3eDecodeGithubComNbdWtfGoNostr(in *jlexer.Lexer, out *Event)
case "created_at":
out.CreatedAt = Timestamp(in.Int64())
case "kind":
out.Kind = uint16(in.Int())
out.Kind = Kind(in.Int())
case "tags":
if in.IsNull() {
in.Skip()

View File

@@ -60,9 +60,9 @@ func (b *BadgerBackend) CountEvents(filter nostr.Filter) (uint32, error) {
return err
}
err = item.Value(func(val []byte) error {
err = item.Value(func(bin []byte) error {
evt := nostr.Event{}
if err := betterbinary.Unmarshal(val, &evt); err != nil {
if err := betterbinary.Unmarshal(bin, &evt); err != nil {
return err
}
@@ -135,15 +135,15 @@ func (b *BadgerBackend) CountEventsHLL(filter nostr.Filter, offset int) (uint32,
return err
}
err = item.Value(func(val []byte) error {
err = item.Value(func(bin []byte) error {
if extraFilter == nil {
hll.AddBytes([32]byte(val[32:64]))
hll.AddBytes(betterbinary.GetPubKey(bin))
count++
return nil
}
evt := nostr.Event{}
if err := betterbinary.Unmarshal(val, &evt); err != nil {
if err := betterbinary.Unmarshal(bin, &evt); err != nil {
return err
}
if extraFilter.Matches(evt) {

View File

@@ -12,6 +12,8 @@ import (
var serialDelete uint32 = 0
func (b *BadgerBackend) DeleteEvent(id nostr.ID) error {
fmt.Println("...", id)
deletionHappened := false
err := b.Update(func(txn *badger.Txn) error {
@@ -52,21 +54,24 @@ func (b *BadgerBackend) delete(txn *badger.Txn, id nostr.ID) (bool, error) {
var evt nostr.Event
it := txn.NewIterator(opts)
defer it.Close()
it.Seek(prefix)
if it.ValidForPrefix(prefix) {
idx = append(idx, it.Item().Key()[1+8:]...)
if err := it.Item().Value(func(val []byte) error {
return betterbinary.Unmarshal(val, &evt)
}); err != nil {
return false, fmt.Errorf("failed to unmarshal event %x to delete: %w", id[:], err)
idx = append(idx, it.Item().Key()[1+8:1+8+4]...)
item, err := txn.Get(idx)
if err == badger.ErrKeyNotFound {
// this event doesn't exist or is already deleted
return false, nil
} else if err != nil {
return false, fmt.Errorf("failed to fetch event %x to delete: %w", id[:], err)
} else {
if err := item.Value(func(bin []byte) error {
return betterbinary.Unmarshal(bin, &evt)
}); err != nil {
return false, fmt.Errorf("failed to unmarshal event %x to delete: %w", id[:], err)
}
}
}
it.Close()
// if no idx was found, end here, this event doesn't exist
if len(idx) == 1 {
return false, nil
}
// calculate all index keys we have for this event and delete them
for k := range b.getIndexKeysForEvent(evt, idx[1:]) {
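
Since removed and added lines run together in this hunk, here is the fixed lookup in isolation: the id index only stores the event's 4-byte serial, so the delete path now resolves that serial first and fetches the raw event from the main store before recomputing its index keys. A rough sketch written as if inside this package; serialFromIDIndex and rawEventKey are hypothetical helpers standing in for the prefix/seek handling the real code does inline:

func deleteByID(b *BadgerBackend, txn *badger.Txn, id nostr.ID) (bool, error) {
	// seek the id index and take key[1+8 : 1+8+4], i.e. the event's serial
	serial, found := serialFromIDIndex(txn, id)
	if !found {
		return false, nil // nothing indexed under this id
	}

	// the event itself has to be fetched from the raw event store before its
	// index keys can be recomputed
	item, err := txn.Get(rawEventKey(serial))
	if err == badger.ErrKeyNotFound {
		return false, nil // already deleted
	} else if err != nil {
		return false, fmt.Errorf("failed to fetch event %x to delete: %w", id[:], err)
	}

	var evt nostr.Event
	if err := item.Value(func(bin []byte) error { return betterbinary.Unmarshal(bin, &evt) }); err != nil {
		return false, fmt.Errorf("failed to unmarshal event %x to delete: %w", id[:], err)
	}

	// with the full event in hand, every derived index key can be dropped
	for key := range b.getIndexKeysForEvent(evt, serial) {
		if err := txn.Delete(key); err != nil {
			return false, err
		}
	}
	return true, nil
}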

View File

@@ -70,11 +70,11 @@ func FuzzQuery(f *testing.F) {
Authors: make([]nostr.PubKey, authors),
Limit: int(limit),
}
var maxKind uint16 = 1
var maxKind nostr.Kind = 1
if kinds > 0 {
filter.Kinds = make([]uint16, kinds)
filter.Kinds = make([]nostr.Kind, kinds)
for i := range filter.Kinds {
filter.Kinds[i] = uint16(kindFactor) * uint16(i)
filter.Kinds[i] = nostr.Kind(kindFactor) * nostr.Kind(i)
}
maxKind = filter.Kinds[len(filter.Kinds)-1]
}
@@ -96,7 +96,7 @@ func FuzzQuery(f *testing.F) {
CreatedAt: nostr.Timestamp(skseed)*nostr.Timestamp(timestampAuthorFactor) + nostr.Timestamp(i),
Content: fmt.Sprintf("unbalanced %d", i),
Tags: nostr.Tags{},
Kind: uint16(i) % maxKind,
Kind: nostr.Kind(i) % maxKind,
}
err := evt.Sign(sk)
require.NoError(t, err)

View File

@@ -19,7 +19,7 @@ func getTagIndexPrefix(tagValue string) ([]byte, int) {
// store value in the new special "a" tag index
k = make([]byte, 1+2+8+len(d)+4+4)
k[0] = indexTagAddrPrefix
binary.BigEndian.PutUint16(k[1:], kind)
binary.BigEndian.PutUint16(k[1:], uint16(kind))
copy(k[1+2:], pkb[0:8])
copy(k[1+2+8:], d)
offset = 1 + 2 + 8 + len(d)
@@ -137,12 +137,12 @@ func (b *BadgerBackend) getIndexKeysForEvent(evt nostr.Event, idx []byte) iter.S
}
}
func getAddrTagElements(tagValue string) (kind uint16, pkb []byte, d string) {
func getAddrTagElements(tagValue string) (kind nostr.Kind, pkb []byte, d string) {
spl := strings.Split(tagValue, ":")
if len(spl) == 3 {
if pkb, _ := hex.DecodeString(spl[1]); len(pkb) == 32 {
if kind, err := strconv.ParseUint(spl[0], 10, 16); err == nil {
return uint16(kind), pkb, spl[2]
return nostr.Kind(kind), pkb, spl[2]
}
}
}
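
For context on what getAddrTagElements pulls apart: an "a" tag value is the NIP-01 address coordinate "<kind>:<pubkey-hex>:<d-identifier>". A small usage sketch, reusing the pubkey from the tests above; "my-article" is just an illustrative "d" identifier:

// called from inside this package, since getAddrTagElements is unexported
func exampleAddrTag() {
	kind, pkb, d := getAddrTagElements(
		"30023:3bf0c63fcb93463407af97a5e5ee64fa883d107ef9e558472c4eb9aaaefa459d:my-article",
	)
	_, _, _ = kind, pkb, d
	// kind == nostr.Kind(30023), i.e. KindArticle
	// len(pkb) == 32, the decoded pubkey bytes
	// d == "my-article"
}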

View File

@@ -161,26 +161,26 @@ func (b *BadgerBackend) query(txn *badger.Txn, filter nostr.Filter, limit int) (
return nil, err
}
if err := item.Value(func(val []byte) error {
// fmt.Println(" event", hex.EncodeToString(val[0:4]), "kind", binary.BigEndian.Uint16(val[132:134]), "author", hex.EncodeToString(val[32:36]), "ts", nostr.Timestamp(binary.BigEndian.Uint32(val[128:132])))
if err := item.Value(func(bin []byte) error {
// fmt.Println(" event", betterbinary.GetID(bin ), "kind", betterbinary.GetKind(bin ).Num(), "author", betterbinary.GetPubKey(bin ), "ts", betterbinary.GetCreatedAt(bin ), hex.EncodeToString(it.key), it.valIdx)
// check it against pubkeys without decoding the entire thing
if extraFilter != nil && extraFilter.Authors != nil &&
!nostr.ContainsPubKey(extraFilter.Authors, nostr.PubKey(val[32:64])) {
!nostr.ContainsPubKey(extraFilter.Authors, betterbinary.GetPubKey(bin)) {
// fmt.Println(" skipped (authors)")
return nil
}
// check it against kinds without decoding the entire thing
if extraFilter != nil && extraFilter.Kinds != nil &&
!slices.Contains(extraFilter.Kinds, binary.BigEndian.Uint16(val[132:134])) {
!slices.Contains(extraFilter.Kinds, betterbinary.GetKind(bin)) {
// fmt.Println(" skipped (kinds)")
return nil
}
event := nostr.Event{}
if err := betterbinary.Unmarshal(val, &event); err != nil {
log.Printf("badger: value read error (id %x): %s\n", val[0:32], err)
if err := betterbinary.Unmarshal(bin, &event); err != nil {
log.Printf("badger: value read error (id %x): %s\n", betterbinary.GetID(bin), err)
return err
}

View File

@@ -16,8 +16,8 @@ func (b *BadgerBackend) ReplaceEvent(evt nostr.Event) error {
}
return b.Update(func(txn *badger.Txn) error {
filter := nostr.Filter{Limit: 1, Kinds: []uint16{evt.Kind}, Authors: []nostr.PubKey{evt.PubKey}}
if nostr.IsAddressableKind(evt.Kind) {
filter := nostr.Filter{Limit: 1, Kinds: []nostr.Kind{evt.Kind}, Authors: []nostr.PubKey{evt.PubKey}}
if evt.Kind.IsAddressable() {
// when addressable, add the "d" tag to the filter
filter.Tags = nostr.TagMap{"d": []string{evt.Tags.GetD()}}
}

View File

@@ -12,8 +12,8 @@ func (b *BlugeBackend) ReplaceEvent(evt nostr.Event) error {
b.Lock()
defer b.Unlock()
filter := nostr.Filter{Limit: 1, Kinds: []uint16{evt.Kind}, Authors: []nostr.PubKey{evt.PubKey}}
if nostr.IsAddressableKind(evt.Kind) {
filter := nostr.Filter{Limit: 1, Kinds: []nostr.Kind{evt.Kind}, Authors: []nostr.PubKey{evt.PubKey}}
if evt.Kind.IsReplaceable() {
filter.Tags = nostr.TagMap{"d": []string{evt.Tags.GetD()}}
}

View File

@@ -0,0 +1,23 @@
package betterbinary
import (
"encoding/binary"
"fiatjaf.com/nostr"
)
func GetKind(evtb []byte) nostr.Kind {
return nostr.Kind(binary.LittleEndian.Uint16(evtb[1:3]))
}
func GetID(evtb []byte) nostr.ID {
return nostr.ID(evtb[7:39])
}
func GetPubKey(evtb []byte) nostr.PubKey {
return nostr.PubKey(evtb[39:71])
}
func GetCreatedAt(evtb []byte) nostr.Timestamp {
return nostr.Timestamp(binary.LittleEndian.Uint32(evtb[3:7]))
}
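
These accessors exist so callers can read an event's kind, id, pubkey and timestamp straight from the encoded bytes, which is how the badger and lmdb query paths in this commit now pre-filter candidates before paying for a full Unmarshal. A small sketch of that usage; the betterbinary import path is assumed from the package layout:

package sketch

import (
	"slices"

	"fiatjaf.com/nostr"
	"fiatjaf.com/nostr/eventstore/betterbinary" // assumed import path
)

// decodeIfWanted mirrors the query paths: reject on kind/author straight from the
// encoded bytes and only fully unmarshal events that survive the cheap checks.
func decodeIfWanted(bin []byte, wantKinds []nostr.Kind, wantAuthors []nostr.PubKey) (nostr.Event, bool) {
	if wantKinds != nil && !slices.Contains(wantKinds, betterbinary.GetKind(bin)) {
		return nostr.Event{}, false
	}
	if wantAuthors != nil && !slices.Contains(wantAuthors, betterbinary.GetPubKey(bin)) {
		return nostr.Event{}, false
	}
	var evt nostr.Event
	if err := betterbinary.Unmarshal(bin, &evt); err != nil {
		return nostr.Event{}, false
	}
	return evt, true
}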

View File

@@ -107,7 +107,7 @@ func Unmarshal(data []byte, evt *nostr.Event) (err error) {
}
}()
evt.Kind = uint16(binary.LittleEndian.Uint16(data[1:3]))
evt.Kind = nostr.Kind(binary.LittleEndian.Uint16(data[1:3]))
evt.CreatedAt = nostr.Timestamp(binary.LittleEndian.Uint32(data[3:7]))
evt.ID = nostr.ID(data[7:39])
evt.PubKey = nostr.PubKey(data[39:71])

View File

@@ -27,7 +27,3 @@ func TagMatches(evtb []byte, key string, vals []string) bool {
}
return false
}
func KindMatches(evtb []byte, kind uint16) bool {
return binary.LittleEndian.Uint16(evtb[1:3]) == kind
}

View File

@@ -53,25 +53,25 @@ func (b *LMDBBackend) CountEvents(filter nostr.Filter) (uint32, error) {
count++
} else {
// fetch actual event
val, err := txn.Get(b.rawEventStore, it.valIdx)
bin, err := txn.Get(b.rawEventStore, it.valIdx)
if err != nil {
panic(err)
}
// check it against pubkeys without decoding the entire thing
if !slices.Contains(extraAuthors, [32]byte(val[32:64])) {
if !slices.Contains(extraAuthors, betterbinary.GetPubKey(bin)) {
it.next()
continue
}
// check it against kinds without decoding the entire thing
if !slices.Contains(extraKinds, [2]byte(val[132:134])) {
if !slices.Contains(extraKinds, betterbinary.GetKind(bin)) {
it.next()
continue
}
evt := &nostr.Event{}
if err := betterbinary.Unmarshal(val, evt); err != nil {
if err := betterbinary.Unmarshal(bin, evt); err != nil {
it.next()
continue
}
@@ -139,7 +139,7 @@ func (b *LMDBBackend) CountEventsHLL(filter nostr.Filter, offset int) (uint32, *
}
// fetch actual event (we need it regardless because we need the pubkey for the hll)
val, err := txn.Get(b.rawEventStore, it.valIdx)
bin, err := txn.Get(b.rawEventStore, it.valIdx)
if err != nil {
panic(err)
}
@@ -147,16 +147,16 @@ func (b *LMDBBackend) CountEventsHLL(filter nostr.Filter, offset int) (uint32, *
if extraKinds == nil && extraTagValues == nil {
// nothing extra to check
count++
hll.AddBytes(nostr.PubKey(val[32:64]))
hll.AddBytes(betterbinary.GetPubKey(bin))
} else {
// check it against kinds without decoding the entire thing
if !slices.Contains(extraKinds, [2]byte(val[132:134])) {
if !slices.Contains(extraKinds, betterbinary.GetKind(bin)) {
it.next()
continue
}
evt := &nostr.Event{}
if err := betterbinary.Unmarshal(val, evt); err != nil {
if err := betterbinary.Unmarshal(bin, evt); err != nil {
it.next()
continue
}

View File

@@ -26,13 +26,13 @@ func (b *LMDBBackend) delete(txn *lmdb.Txn, id nostr.ID) error {
}
// if we do, get it so we can compute the indexes
buf, err := txn.Get(b.rawEventStore, idx)
bin, err := txn.Get(b.rawEventStore, idx)
if err != nil {
return fmt.Errorf("failed to get raw event %x to delete: %w", id, err)
}
var evt nostr.Event
if err := betterbinary.Unmarshal(buf, &evt); err != nil {
if err := betterbinary.Unmarshal(bin, &evt); err != nil {
return fmt.Errorf("failed to unmarshal raw event %x to delete: %w", id, err)
}

View File

@@ -49,11 +49,11 @@ func FuzzQuery(f *testing.F) {
Authors: make([]nostr.PubKey, authors),
Limit: int(limit),
}
var maxKind uint16 = 1
var maxKind nostr.Kind = 1
if kinds > 0 {
filter.Kinds = make([]uint16, kinds)
filter.Kinds = make([]nostr.Kind, kinds)
for i := range filter.Kinds {
filter.Kinds[i] = uint16(int(kindFactor) * i)
filter.Kinds[i] = nostr.Kind(int(kindFactor) * i)
}
maxKind = filter.Kinds[len(filter.Kinds)-1]
}
@@ -75,7 +75,7 @@ func FuzzQuery(f *testing.F) {
CreatedAt: nostr.Timestamp(skseed)*nostr.Timestamp(timestampAuthorFactor) + nostr.Timestamp(i),
Content: fmt.Sprintf("unbalanced %d", i),
Tags: nostr.Tags{},
Kind: uint16(i) % maxKind,
Kind: nostr.Kind(i) % maxKind,
}
err := evt.Sign(sk)
require.NoError(t, err)

View File

@@ -6,6 +6,7 @@ import (
"os"
"sync/atomic"
"fiatjaf.com/nostr"
"fiatjaf.com/nostr/eventstore"
"github.com/PowerDNS/lmdb-go/lmdb"
)
@@ -34,7 +35,7 @@ type LMDBBackend struct {
indexPTagKind lmdb.DBI
hllCache lmdb.DBI
EnableHLLCacheFor func(kind uint16) (useCache bool, skipSavingActualEvent bool)
EnableHLLCacheFor func(kind nostr.Kind) (useCache bool, skipSavingActualEvent bool)
lastId atomic.Uint32
}

View File

@@ -113,9 +113,10 @@ func (b *LMDBBackend) query(txn *lmdb.Txn, filter nostr.Filter, limit int) ([]in
if oldest.Q == q && remainingUnexhausted > 1 {
continue
}
// fmt.Println(" query", q, unsafe.Pointer(&results[q]), b.dbiName(query.dbi), hex.EncodeToString(query.prefix), len(results[q]))
// fmt.Println(" query", q, unsafe.Pointer(&results[q]), b.dbiName(query.dbi), hex.EncodeToString(query.prefix), hex.EncodeToString(query.startingPoint), len(results[q]))
it := iterators[q]
// fmt.Println(" ", q, unsafe.Pointer(iterators[q]), it.err)
pulledThisIteration := 0
for {
@@ -124,7 +125,7 @@ func (b *LMDBBackend) query(txn *lmdb.Txn, filter nostr.Filter, limit int) ([]in
len(it.key) != query.keySize ||
!bytes.HasPrefix(it.key, query.prefix) {
// either iteration has errored or we reached the end of this prefix
// fmt.Println(" reached end", it.key, query.keySize, query.prefix)
// fmt.Println(" reached end", hex.EncodeToString(it.key), query.keySize, hex.EncodeToString(query.prefix), it.err)
exhaust(q)
break
}
@@ -140,7 +141,7 @@ func (b *LMDBBackend) query(txn *lmdb.Txn, filter nostr.Filter, limit int) ([]in
}
// fetch actual event
val, err := txn.Get(b.rawEventStore, it.valIdx)
bin, err := txn.Get(b.rawEventStore, it.valIdx)
if err != nil {
log.Printf(
"lmdb: failed to get %x based on prefix %x, index key %x from raw event store: %s\n",
@@ -149,26 +150,26 @@ func (b *LMDBBackend) query(txn *lmdb.Txn, filter nostr.Filter, limit int) ([]in
}
// check it against pubkeys without decoding the entire thing
if extraAuthors != nil && !slices.Contains(extraAuthors, [32]byte(val[32:64])) {
if extraAuthors != nil && !slices.Contains(extraAuthors, betterbinary.GetPubKey(bin)) {
it.next()
continue
}
// check it against kinds without decoding the entire thing
if extraKinds != nil && !slices.Contains(extraKinds, [2]byte(val[132:134])) {
if extraKinds != nil && !slices.Contains(extraKinds, betterbinary.GetKind(bin)) {
it.next()
continue
}
// decode the entire thing
event := nostr.Event{}
if err := betterbinary.Unmarshal(val, &event); err != nil {
log.Printf("lmdb: value read error (id %x) on query prefix %x sp %x dbi %d: %s\n", val[0:32],
if err := betterbinary.Unmarshal(bin, &event); err != nil {
log.Printf("lmdb: value read error (id %x) on query prefix %x sp %x dbi %d: %s\n", betterbinary.GetID(bin),
query.prefix, query.startingPoint, query.dbi, err)
return nil, fmt.Errorf("event read error: %w", err)
}
// fmt.Println(" event", hex.EncodeToString(val[0:4]), "kind", binary.BigEndian.Uint16(val[132:134]), "author", hex.EncodeToString(val[32:36]), "ts", nostr.Timestamp(binary.BigEndian.Uint32(val[128:132])), hex.EncodeToString(it.key), it.valIdx)
// fmt.Println(" event", betterbinary.GetID(bin), "kind", betterbinary.GetKind(bin).Num(), "author", betterbinary.GetPubKey(bin), "ts", betterbinary.GetCreatedAt(bin), hex.EncodeToString(it.key), it.valIdx)
// if there is still a tag to be checked, do it now
if extraTagValues != nil && !event.Tags.ContainsAny(extraTagKey, extraTagValues) {

View File

@@ -22,8 +22,8 @@ type query struct {
func (b *LMDBBackend) prepareQueries(filter nostr.Filter) (
queries []query,
extraAuthors [][32]byte,
extraKinds [][2]byte,
extraAuthors []nostr.PubKey,
extraKinds []nostr.Kind,
extraTagKey string,
extraTagValues []string,
since uint32,
@@ -127,16 +127,16 @@ func (b *LMDBBackend) prepareQueries(filter nostr.Filter) (
// add an extra kind filter if available (only do this on plain tag index, not on ptag-kind index)
if filter.Kinds != nil {
extraKinds = make([][2]byte, len(filter.Kinds))
extraKinds = make([]nostr.Kind, len(filter.Kinds))
for i, kind := range filter.Kinds {
binary.BigEndian.PutUint16(extraKinds[i][0:2], uint16(kind))
extraKinds[i] = kind
}
}
}
// add an extra author search if possible
if filter.Authors != nil {
extraAuthors = make([][32]byte, len(filter.Authors))
extraAuthors = make([]nostr.PubKey, len(filter.Authors))
for i, pk := range filter.Authors {
extraAuthors[i] = pk
}

View File

@@ -16,8 +16,8 @@ func (b *LMDBBackend) ReplaceEvent(evt nostr.Event) error {
}
return b.lmdbEnv.Update(func(txn *lmdb.Txn) error {
filter := nostr.Filter{Limit: 1, Kinds: []uint16{evt.Kind}, Authors: []nostr.PubKey{evt.PubKey}}
if nostr.IsAddressableKind(evt.Kind) {
filter := nostr.Filter{Limit: 1, Kinds: []nostr.Kind{evt.Kind}, Authors: []nostr.PubKey{evt.PubKey}}
if evt.Kind.IsAddressable() {
// when addressable, add the "d" tag to the filter
filter.Tags = nostr.TagMap{"d": []string{evt.Tags.GetD()}}
}

View File

@@ -54,13 +54,13 @@ func (il *IndexingLayer) CountEvents(filter nostr.Filter) (uint32, error) {
bin := il.mmmm.mmapf[pos.start : pos.start+uint64(pos.size)]
// check it against pubkeys without decoding the entire thing
if extraAuthors != nil && !slices.Contains(extraAuthors, [32]byte(bin[39:71])) {
if extraAuthors != nil && !slices.Contains(extraAuthors, betterbinary.GetPubKey(bin)) {
it.next()
continue
}
// check it against kinds without decoding the entire thing
if extraKinds != nil && !slices.Contains(extraKinds, [2]byte(bin[1:3])) {
if extraKinds != nil && !slices.Contains(extraKinds, betterbinary.GetKind(bin)) {
it.next()
continue
}

View File

@@ -69,7 +69,7 @@ func FuzzTest(f *testing.F) {
evt := nostr.Event{
CreatedAt: nostr.Timestamp(i),
Kind: uint16(i), // hack to query by serial id
Kind: nostr.Kind(i), // hack to query by serial id
Tags: tags,
Content: fmt.Sprintf("test content %d", i),
}
@@ -148,13 +148,13 @@ func FuzzTest(f *testing.F) {
for _, layer := range mmm.layers {
// verify event still accessible from other layers
if slices.Contains(foundlayers, layer) {
next, stop := iter.Pull(layer.QueryEvents(nostr.Filter{Kinds: []uint16{evt.Kind}})) // hack
next, stop := iter.Pull(layer.QueryEvents(nostr.Filter{Kinds: []nostr.Kind{evt.Kind}})) // hack
_, fetched := next()
require.True(t, fetched)
stop()
} else {
// and not accessible from this layer we just deleted
next, stop := iter.Pull(layer.QueryEvents(nostr.Filter{Kinds: []uint16{evt.Kind}})) // hack
next, stop := iter.Pull(layer.QueryEvents(nostr.Filter{Kinds: []nostr.Kind{evt.Kind}})) // hack
_, fetched := next()
require.True(t, fetched)
stop()

View File

@@ -212,13 +212,13 @@ func (il *IndexingLayer) query(txn *lmdb.Txn, filter nostr.Filter, limit int) ([
bin := il.mmmm.mmapf[pos.start : pos.start+uint64(pos.size)]
// check it against pubkeys without decoding the entire thing
if extraAuthors != nil && !slices.Contains(extraAuthors, [32]byte(bin[39:71])) {
if extraAuthors != nil && !slices.Contains(extraAuthors, betterbinary.GetPubKey(bin)) {
it.next()
continue
}
// check it against kinds without decoding the entire thing
if extraKinds != nil && !slices.Contains(extraKinds, [2]byte(bin[1:3])) {
if extraKinds != nil && !slices.Contains(extraKinds, betterbinary.GetKind(bin)) {
it.next()
continue
}
@@ -231,7 +231,7 @@ func (il *IndexingLayer) query(txn *lmdb.Txn, filter nostr.Filter, limit int) ([
return nil, fmt.Errorf("event read error: %w", err)
}
// fmt.Println(" event", hex.EncodeToString(val[0:4]), "kind", binary.BigEndian.Uint16(val[132:134]), "author", hex.EncodeToString(val[32:36]), "ts", nostr.Timestamp(binary.BigEndian.Uint32(val[128:132])), hex.EncodeToString(it.key), it.valIdx)
// fmt.Println(" event", betterbinary.GetID(bin), "kind", betterbinary.GetKind(bin).Num(), "author", betterbinary.GetPubKey(bin), "ts", betterbinary.GetCreatedAt(bin), hex.EncodeToString(it.key), it.valIdx)
// if there is still a tag to be checked, do it now
if extraTagValues != nil && !event.Tags.ContainsAny(extraTagKey, extraTagValues) {

View File

@@ -22,8 +22,8 @@ type query struct {
func (il *IndexingLayer) prepareQueries(filter nostr.Filter) (
queries []query,
extraAuthors [][32]byte,
extraKinds [][2]byte,
extraAuthors []nostr.PubKey,
extraKinds []nostr.Kind,
extraTagKey string,
extraTagValues []string,
since uint32,
@@ -116,16 +116,16 @@ func (il *IndexingLayer) prepareQueries(filter nostr.Filter) (
// add an extra kind filter if available (only do this on plain tag index, not on ptag-kind index)
if filter.Kinds != nil {
extraKinds = make([][2]byte, len(filter.Kinds))
extraKinds = make([]nostr.Kind, len(filter.Kinds))
for i, kind := range filter.Kinds {
binary.BigEndian.PutUint16(extraKinds[i][0:2], uint16(kind))
extraKinds[i] = kind
}
}
}
// add an extra author search if possible
if filter.Authors != nil {
extraAuthors = make([][32]byte, len(filter.Authors))
extraAuthors = make([]nostr.PubKey, len(filter.Authors))
for i, pk := range filter.Authors {
copy(extraAuthors[i][:], pk[:])
}

View File

@@ -22,8 +22,8 @@ func (il *IndexingLayer) ReplaceEvent(evt nostr.Event) error {
runtime.LockOSThread()
defer runtime.UnlockOSThread()
filter := nostr.Filter{Limit: 1, Kinds: []uint16{evt.Kind}, Authors: []nostr.PubKey{evt.PubKey}}
if nostr.IsAddressableKind(evt.Kind) {
filter := nostr.Filter{Limit: 1, Kinds: []nostr.Kind{evt.Kind}, Authors: []nostr.PubKey{evt.PubKey}}
if evt.Kind.IsAddressable() {
// when addressable, add the "d" tag to the filter
filter.Tags = nostr.TagMap{"d": []string{evt.Tags.GetD()}}
}

View File

@@ -130,8 +130,8 @@ func (b *SliceStore) ReplaceEvent(evt nostr.Event) error {
b.Lock()
defer b.Unlock()
filter := nostr.Filter{Limit: 1, Kinds: []uint16{evt.Kind}, Authors: []nostr.PubKey{evt.PubKey}}
if nostr.IsAddressableKind(evt.Kind) {
filter := nostr.Filter{Limit: 1, Kinds: []nostr.Kind{evt.Kind}, Authors: []nostr.PubKey{evt.PubKey}}
if evt.Kind.IsAddressable() {
filter.Tags = nostr.TagMap{"d": []string{evt.Tags.GetD()}}
}

View File

@@ -21,7 +21,7 @@ func TestBasicStuff(t *testing.T) {
if i%3 == 0 {
kind = 12
}
evt := nostr.Event{CreatedAt: nostr.Timestamp(v), Kind: uint16(kind)}
evt := nostr.Event{CreatedAt: nostr.Timestamp(v), Kind: nostr.Kind(kind)}
evt.Sign(nostr.Generate())
ss.SaveEvent(evt)
}
@@ -38,7 +38,7 @@ func TestBasicStuff(t *testing.T) {
until := nostr.Timestamp(9999)
list = make([]nostr.Event, 0, 7)
for event := range ss.QueryEvents(nostr.Filter{Limit: 15, Until: &until, Kinds: []uint16{11}}) {
for event := range ss.QueryEvents(nostr.Filter{Limit: 15, Until: &until, Kinds: []nostr.Kind{11}}) {
list = append(list, event)
}
if len(list) != 7 {

View File

@@ -52,7 +52,7 @@ func runBenchmarkOn(b *testing.B, db eventstore.Store) {
{"e", hex.EncodeToString(eTag)},
{"p", ref.Hex()},
},
Kind: uint16(i % 10),
Kind: nostr.Kind(i % 10),
}
sk := sk3
if i%3 == 0 {
@@ -63,24 +63,24 @@ func runBenchmarkOn(b *testing.B, db eventstore.Store) {
}
filters := make([]nostr.Filter, 0, 10)
filters = append(filters, nostr.Filter{Kinds: []uint16{1, 4, 8, 16}})
filters = append(filters, nostr.Filter{Kinds: []nostr.Kind{1, 4, 8, 16}})
pk3 := nostr.GetPublicKey(sk3)
filters = append(filters, nostr.Filter{Authors: []nostr.PubKey{pk3, nostr.Generate().Public()}})
filters = append(filters, nostr.Filter{Authors: []nostr.PubKey{pk3, nostr.Generate().Public()}, Kinds: []uint16{3, 4}})
filters = append(filters, nostr.Filter{Authors: []nostr.PubKey{pk3, nostr.Generate().Public()}, Kinds: []nostr.Kind{3, 4}})
filters = append(filters, nostr.Filter{})
filters = append(filters, nostr.Filter{Limit: 20})
filters = append(filters, nostr.Filter{Kinds: []uint16{8, 9}, Tags: nostr.TagMap{"p": []string{pk3.Hex()}}})
filters = append(filters, nostr.Filter{Kinds: []nostr.Kind{8, 9}, Tags: nostr.TagMap{"p": []string{pk3.Hex()}}})
pk4 := nostr.GetPublicKey(sk4)
filters = append(filters, nostr.Filter{Kinds: []uint16{8, 9}, Tags: nostr.TagMap{"p": []string{pk3.Hex(), pk4.Hex()}}})
filters = append(filters, nostr.Filter{Kinds: []uint16{8, 9}, Tags: nostr.TagMap{"p": []string{pk3.Hex(), pk4.Hex()}}})
filters = append(filters, nostr.Filter{Kinds: []nostr.Kind{8, 9}, Tags: nostr.TagMap{"p": []string{pk3.Hex(), pk4.Hex()}}})
filters = append(filters, nostr.Filter{Kinds: []nostr.Kind{8, 9}, Tags: nostr.TagMap{"p": []string{pk3.Hex(), pk4.Hex()}}})
eTags := make([]string, 20)
for i := 0; i < 20; i++ {
eTag := make([]byte, 32)
binary.BigEndian.PutUint16(eTag, uint16(i))
eTags[i] = hex.EncodeToString(eTag)
}
filters = append(filters, nostr.Filter{Kinds: []uint16{9}, Tags: nostr.TagMap{"e": eTags}})
filters = append(filters, nostr.Filter{Kinds: []uint16{5}, Tags: nostr.TagMap{"e": eTags, "t": []string{"t5"}}})
filters = append(filters, nostr.Filter{Kinds: []nostr.Kind{9}, Tags: nostr.TagMap{"e": eTags}})
filters = append(filters, nostr.Filter{Kinds: []nostr.Kind{5}, Tags: nostr.TagMap{"e": eTags, "t": []string{"t5"}}})
filters = append(filters, nostr.Filter{Tags: nostr.TagMap{"e": eTags}})
filters = append(filters, nostr.Filter{Tags: nostr.TagMap{"e": eTags}, Limit: 50})

View File

@@ -73,7 +73,7 @@ func runFirstTestOn(t *testing.T, db eventstore.Store) {
}
{
results := slices.Collect(db.QueryEvents(nostr.Filter{Kinds: []uint16{1}}))
results := slices.Collect(db.QueryEvents(nostr.Filter{Kinds: []nostr.Kind{1}}))
require.ElementsMatch(t,
[]nostr.Event{allEvents[1], allEvents[3], allEvents[5], allEvents[7], allEvents[9]},
results,
@@ -81,7 +81,7 @@ func runFirstTestOn(t *testing.T, db eventstore.Store) {
}
{
results := slices.Collect(db.QueryEvents(nostr.Filter{Kinds: []uint16{9}}))
results := slices.Collect(db.QueryEvents(nostr.Filter{Kinds: []nostr.Kind{9}}))
require.ElementsMatch(t,
[]nostr.Event{allEvents[0], allEvents[2], allEvents[4], allEvents[6], allEvents[8]},
results,
@@ -99,7 +99,7 @@ func runFirstTestOn(t *testing.T, db eventstore.Store) {
{
pk3 := nostr.GetPublicKey(sk3)
results := slices.Collect(db.QueryEvents(nostr.Filter{Kinds: []uint16{9}, Authors: []nostr.PubKey{pk3}}))
results := slices.Collect(db.QueryEvents(nostr.Filter{Kinds: []nostr.Kind{9}, Authors: []nostr.PubKey{pk3}}))
require.ElementsMatch(t,
[]nostr.Event{allEvents[2], allEvents[4], allEvents[8]},
results,
@@ -109,8 +109,7 @@ func runFirstTestOn(t *testing.T, db eventstore.Store) {
{
pk3 := nostr.GetPublicKey(sk3)
pk4 := nostr.GetPublicKey(sk4)
pk4[1] = 9 // this is so it doesn't match
results := slices.Collect(db.QueryEvents(nostr.Filter{Kinds: []uint16{9, 5, 7}, Authors: []nostr.PubKey{pk3, pk4}}))
results := slices.Collect(db.QueryEvents(nostr.Filter{Kinds: []nostr.Kind{9, 5, 7}, Authors: []nostr.PubKey{pk3, pk4}}))
require.ElementsMatch(t,
[]nostr.Event{allEvents[0], allEvents[2], allEvents[4], allEvents[6], allEvents[8]},
results,
@@ -183,17 +182,18 @@ func runFirstTestOn(t *testing.T, db eventstore.Store) {
}
sk := nostr.Generate()
for _, newEvent := range newEvents {
newEvent.Sign(sk)
require.NoError(t, db.SaveEvent(newEvent))
for i := range newEvents {
newEvents[i].Sign(sk)
require.NoError(t, db.SaveEvent(newEvents[i]))
}
{
results := slices.Collect(db.QueryEvents(nostr.Filter{Tags: nostr.TagMap{"p": []string{p}}, Kinds: []uint16{1984}, Limit: 2}))
results := slices.Collect(db.QueryEvents(nostr.Filter{Tags: nostr.TagMap{"p": []string{p}}, Kinds: []nostr.Kind{1984}, Limit: 2}))
require.ElementsMatch(t,
[]nostr.Event{newEvents[2], newEvents[1]},
results,
"'p' tag 1 query error")
"'p' tag 1 query error",
)
}
{
@@ -206,7 +206,7 @@ func runFirstTestOn(t *testing.T, db eventstore.Store) {
}
{
results := slices.Collect(db.QueryEvents(nostr.Filter{Tags: nostr.TagMap{"p": []string{p, p2}}, Kinds: []uint16{1}, Limit: 4}))
results := slices.Collect(db.QueryEvents(nostr.Filter{Tags: nostr.TagMap{"p": []string{p, p2}}, Kinds: []nostr.Kind{1}, Limit: 4}))
for _, idx := range []int{5, 6, 7} {
require.True(t,
slices.ContainsFunc(

View File

@@ -17,7 +17,7 @@ func manyAuthorsTest(t *testing.T, db eventstore.Store) {
const total = 10000
const limit = 500
const authors = 1700
kinds := []uint16{6, 7, 8}
kinds := []nostr.Kind{6, 7, 8}
bigfilter := nostr.Filter{
Authors: make([]nostr.PubKey, authors),
@@ -40,7 +40,7 @@ func manyAuthorsTest(t *testing.T, db eventstore.Store) {
CreatedAt: nostr.Timestamp(i*i) / 4,
Content: fmt.Sprintf("lots of stuff %d", i),
Tags: nostr.Tags{},
Kind: uint16(i % 10),
Kind: nostr.Kind(i % 10),
}
err := evt.Sign([32]byte(sk))
require.NoError(t, err)

View File

@@ -32,7 +32,7 @@ func runSecondTestOn(t *testing.T, db eventstore.Store) {
{"e", hex.EncodeToString(eTag)},
{"p", ref.Hex()},
},
Kind: uint16(i % 10),
Kind: nostr.Kind(i % 10),
}
sk := sk3
if i%3 == 0 {
@@ -52,19 +52,20 @@ func runSecondTestOn(t *testing.T, db eventstore.Store) {
eTags[i] = hex.EncodeToString(eTag)
}
filters := make([]nostr.Filter, 0, 10)
filters = append(filters, nostr.Filter{Kinds: []uint16{1, 4, 8, 16}})
filters = append(filters, nostr.Filter{Authors: []nostr.PubKey{pk3, nostr.Generate().Public()}})
filters = append(filters, nostr.Filter{Authors: []nostr.PubKey{pk3, nostr.Generate().Public()}, Kinds: []uint16{3, 4}})
filters = append(filters, nostr.Filter{})
filters = append(filters, nostr.Filter{Limit: 20})
filters = append(filters, nostr.Filter{Kinds: []uint16{8, 9}, Tags: nostr.TagMap{"p": []string{pk3.Hex()}}})
filters = append(filters, nostr.Filter{Kinds: []uint16{8, 9}, Tags: nostr.TagMap{"p": []string{pk3.Hex(), pk4.Hex()}}})
filters = append(filters, nostr.Filter{Kinds: []uint16{8, 9}, Tags: nostr.TagMap{"p": []string{pk3.Hex(), pk4.Hex()}}})
filters = append(filters, nostr.Filter{Kinds: []uint16{9}, Tags: nostr.TagMap{"e": eTags}})
filters = append(filters, nostr.Filter{Kinds: []uint16{5}, Tags: nostr.TagMap{"e": eTags, "t": []string{"t5"}}})
filters = append(filters, nostr.Filter{Tags: nostr.TagMap{"e": eTags}})
filters = append(filters, nostr.Filter{Tags: nostr.TagMap{"e": eTags}, Limit: 50})
filters := []nostr.Filter{
{Kinds: []nostr.Kind{1, 4, 8, 16}},
{Authors: []nostr.PubKey{pk3, nostr.Generate().Public()}},
{Authors: []nostr.PubKey{pk3, nostr.Generate().Public()}, Kinds: []nostr.Kind{3, 4}},
{},
{Limit: 20},
{Kinds: []nostr.Kind{8, 9}, Tags: nostr.TagMap{"p": []string{pk3.Hex()}}},
{Kinds: []nostr.Kind{8, 9}, Tags: nostr.TagMap{"p": []string{pk3.Hex(), pk4.Hex()}}},
{Kinds: []nostr.Kind{8, 9}, Tags: nostr.TagMap{"p": []string{pk3.Hex(), pk4.Hex()}}},
{Kinds: []nostr.Kind{9}, Tags: nostr.TagMap{"e": eTags}},
{Kinds: []nostr.Kind{5}, Tags: nostr.TagMap{"e": eTags, "t": []string{"t5"}}},
{Tags: nostr.TagMap{"e": eTags}},
{Tags: nostr.TagMap{"e": eTags}, Limit: 50},
}
t.Run("filter", func(t *testing.T) {
for q, filter := range filters {

View File

@@ -15,7 +15,7 @@ type StorePublisher struct {
}
func (w StorePublisher) Publish(ctx context.Context, evt nostr.Event) error {
if nostr.IsEphemeralKind(evt.Kind) {
if evt.Kind.IsEphemeral() {
// do not store ephemeral events
return nil
}
@@ -23,7 +23,7 @@ func (w StorePublisher) Publish(ctx context.Context, evt nostr.Event) error {
ctx, cancel := context.WithCancel(ctx)
defer cancel()
if nostr.IsRegularKind(evt.Kind) {
if evt.Kind.IsRegular() {
// regular events are just saved directly
if err := w.SaveEvent(evt); err != nil && err != eventstore.ErrDupEvent {
return fmt.Errorf("failed to save: %w", err)

View File

@@ -44,6 +44,6 @@ func TestRelayWrapper(t *testing.T) {
}
time.Sleep(time.Millisecond * 200)
evts := slices.Collect(w.QueryEvents(nostr.Filter{Kinds: []uint16{3}}))
evts := slices.Collect(w.QueryEvents(nostr.Filter{Kinds: []nostr.Kind{3}}))
require.Len(t, evts, 1)
}

View File

@@ -8,7 +8,7 @@ import (
type Filter struct {
IDs []ID
Kinds []uint16
Kinds []Kind
Authors []PubKey
Tags TagMap
Since *Timestamp
@@ -171,7 +171,7 @@ func GetTheoreticalLimit(filter Filter) int {
if len(filter.Authors) > 0 {
allAreReplaceable := true
for _, kind := range filter.Kinds {
if !IsReplaceableKind(kind) {
if !kind.IsReplaceable() {
allAreReplaceable = false
break
}
@@ -183,7 +183,7 @@ func GetTheoreticalLimit(filter Filter) int {
if len(filter.Tags["d"]) > 0 {
allAreAddressable := true
for _, kind := range filter.Kinds {
if !IsAddressableKind(kind) {
if !kind.IsAddressable() {
allAreAddressable = false
break
}
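
These two loops are what let GetTheoreticalLimit short-circuit queries: when every kind in the filter is replaceable there can be at most one stored event per (author, kind) pair, and when every kind is addressable at most one per (author, kind, "d") triple. The filter tests further down pin the arithmetic; a sketch of the worked cases as it would read inside this package:

func theoreticalLimitExamples() {
	// 3 authors × 3 replaceable kinds (0, 3, 10002): at most one event per pair
	_ = GetTheoreticalLimit(Filter{
		Authors: []PubKey{{'a'}, {'b'}, {'c'}},
		Kinds:   []Kind{3, 0, 10002},
	}) // == 9

	// 6 authors × 2 addressable kinds × 2 "d" values: at most one event per triple
	_ = GetTheoreticalLimit(Filter{
		Authors: []PubKey{{'a'}, {'b'}, {'c'}, {'d'}, {'e'}, {'f'}},
		Kinds:   []Kind{30023, 30024},
		Tags:    TagMap{"d": []string{"aaa", "bbb"}},
	}) // == 24

	// without Authors (or with any non-replaceable kind mixed in) the shortcut
	// does not apply and the limit stays unbounded
	_ = GetTheoreticalLimit(Filter{Kinds: []Kind{3, 0, 10002}}) // == -1
}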

View File

@@ -66,15 +66,15 @@ func easyjson4d398eaaDecodeGithubComNbdWtfGoNostr(in *jlexer.Lexer, out *Filter)
in.Delim('[')
if out.Kinds == nil {
if !in.IsDelim(']') {
out.Kinds = make([]uint16, 0, 8)
out.Kinds = make([]Kind, 0, 8)
} else {
out.Kinds = []uint16{}
out.Kinds = []Kind{}
}
} else {
out.Kinds = (out.Kinds)[:0]
}
for !in.IsDelim(']') {
out.Kinds = append(out.Kinds, uint16(in.Int()))
out.Kinds = append(out.Kinds, Kind(in.Int()))
in.WantComma()
}
in.Delim(']')

View File

@@ -28,7 +28,7 @@ func TestFilterUnmarshal(t *testing.T) {
func TestFilterMarshal(t *testing.T) {
until := Timestamp(12345678)
filterj, err := json.Marshal(Filter{
Kinds: []uint16{KindTextNote, KindRecommendServer, KindEncryptedDirectMessage},
Kinds: []Kind{KindTextNote, KindRecommendServer, KindEncryptedDirectMessage},
Tags: TagMap{"fruit": {"banana", "mango"}},
Until: &until,
})
@@ -60,7 +60,7 @@ func TestFilterUnmarshalWithLimitZero(t *testing.T) {
func TestFilterMarshalWithLimitZero(t *testing.T) {
until := Timestamp(12345678)
filterj, err := json.Marshal(Filter{
Kinds: []uint16{KindTextNote, KindRecommendServer, KindEncryptedDirectMessage},
Kinds: []Kind{KindTextNote, KindRecommendServer, KindEncryptedDirectMessage},
Tags: TagMap{"fruit": {"banana", "mango"}},
Until: &until,
LimitZero: true,
@@ -83,25 +83,25 @@ func TestFilterMatchingLive(t *testing.T) {
func TestFilterEquality(t *testing.T) {
assert.True(t, FilterEqual(
Filter{Kinds: []uint16{KindEncryptedDirectMessage, KindDeletion}},
Filter{Kinds: []uint16{KindEncryptedDirectMessage, KindDeletion}},
Filter{Kinds: []Kind{KindEncryptedDirectMessage, KindDeletion}},
Filter{Kinds: []Kind{KindEncryptedDirectMessage, KindDeletion}},
), "kinds filters should be equal")
assert.True(t, FilterEqual(
Filter{Kinds: []uint16{KindEncryptedDirectMessage, KindDeletion}, Tags: TagMap{"letter": {"a", "b"}}},
Filter{Kinds: []uint16{KindEncryptedDirectMessage, KindDeletion}, Tags: TagMap{"letter": {"b", "a"}}},
Filter{Kinds: []Kind{KindEncryptedDirectMessage, KindDeletion}, Tags: TagMap{"letter": {"a", "b"}}},
Filter{Kinds: []Kind{KindEncryptedDirectMessage, KindDeletion}, Tags: TagMap{"letter": {"b", "a"}}},
), "kind+tags filters should be equal")
tm := Now()
assert.True(t, FilterEqual(
Filter{
Kinds: []uint16{KindEncryptedDirectMessage, KindDeletion},
Kinds: []Kind{KindEncryptedDirectMessage, KindDeletion},
Tags: TagMap{"letter": {"a", "b"}, "fruit": {"banana"}},
Since: &tm,
IDs: []ID{{'a', 'a'}, {'b', 'b'}},
},
Filter{
Kinds: []uint16{KindDeletion, KindEncryptedDirectMessage},
Kinds: []Kind{KindDeletion, KindEncryptedDirectMessage},
Tags: TagMap{"letter": {"a", "b"}, "fruit": {"banana"}},
Since: &tm,
IDs: []ID{{'a', 'a'}, {'b', 'b'}},
@@ -109,15 +109,15 @@ func TestFilterEquality(t *testing.T) {
), "kind+2tags+since+ids filters should be equal")
assert.False(t, FilterEqual(
Filter{Kinds: []uint16{KindTextNote, KindEncryptedDirectMessage, KindDeletion}},
Filter{Kinds: []uint16{KindEncryptedDirectMessage, KindDeletion, KindRepost}},
Filter{Kinds: []Kind{KindTextNote, KindEncryptedDirectMessage, KindDeletion}},
Filter{Kinds: []Kind{KindEncryptedDirectMessage, KindDeletion, KindRepost}},
), "kinds filters shouldn't be equal")
}
func TestFilterClone(t *testing.T) {
ts := Now() - 60*60
flt := Filter{
Kinds: []uint16{0, 1, 2, 3, 4, 5, 6, 7, 8, 9},
Kinds: []Kind{0, 1, 2, 3, 4, 5, 6, 7, 8, 9},
Tags: TagMap{"letter": {"a", "b"}, "fruit": {"banana"}},
Since: &ts,
IDs: []ID{MustIDFromHex("9894b4b5cb5166d23ee8899a4151cf0c66aec00bde101982a13b8e8ceb972df9")},
@@ -144,10 +144,10 @@ func TestFilterClone(t *testing.T) {
func TestTheoreticalLimit(t *testing.T) {
require.Equal(t, 6, GetTheoreticalLimit(Filter{IDs: []ID{{'a'}, {'b'}, {'c'}, {'d'}, {'e'}, {'f'}}}))
require.Equal(t, 9, GetTheoreticalLimit(Filter{Authors: []PubKey{{'a'}, {'b'}, {'c'}}, Kinds: []uint16{3, 0, 10002}}))
require.Equal(t, 4, GetTheoreticalLimit(Filter{Authors: []PubKey{{'a'}, {'b'}, {'c'}, {'d'}}, Kinds: []uint16{10050}}))
require.Equal(t, 9, GetTheoreticalLimit(Filter{Authors: []PubKey{{'a'}, {'b'}, {'c'}}, Kinds: []Kind{3, 0, 10002}}))
require.Equal(t, 4, GetTheoreticalLimit(Filter{Authors: []PubKey{{'a'}, {'b'}, {'c'}, {'d'}}, Kinds: []Kind{10050}}))
require.Equal(t, -1, GetTheoreticalLimit(Filter{Authors: []PubKey{{'a'}, {'b'}, {'c'}, {'d'}}}))
require.Equal(t, -1, GetTheoreticalLimit(Filter{Kinds: []uint16{3, 0, 10002}}))
require.Equal(t, 24, GetTheoreticalLimit(Filter{Authors: []PubKey{{'a'}, {'b'}, {'c'}, {'d'}, {'e'}, {'f'}}, Kinds: []uint16{30023, 30024}, Tags: TagMap{"d": []string{"aaa", "bbb"}}}))
require.Equal(t, -1, GetTheoreticalLimit(Filter{Authors: []PubKey{{'a'}, {'b'}, {'c'}, {'d'}, {'e'}, {'f'}}, Kinds: []uint16{30023, 30024}}))
require.Equal(t, -1, GetTheoreticalLimit(Filter{Kinds: []Kind{3, 0, 10002}}))
require.Equal(t, 24, GetTheoreticalLimit(Filter{Authors: []PubKey{{'a'}, {'b'}, {'c'}, {'d'}, {'e'}, {'f'}}, Kinds: []Kind{30023, 30024}, Tags: TagMap{"d": []string{"aaa", "bbb"}}}))
require.Equal(t, -1, GetTheoreticalLimit(Filter{Authors: []PubKey{{'a'}, {'b'}, {'c'}, {'d'}, {'e'}, {'f'}}, Kinds: []Kind{30023, 30024}}))
}

View File

@@ -11,7 +11,7 @@ import (
// AddEvent sends an event through the normal add pipeline, as if it was received from a websocket.
func (rl *Relay) AddEvent(ctx context.Context, evt nostr.Event) (skipBroadcast bool, writeError error) {
if nostr.IsEphemeralKind(evt.Kind) {
if evt.Kind.IsEphemeral() {
return false, rl.handleEphemeral(ctx, evt)
} else {
return rl.handleNormal(ctx, evt)
@@ -31,7 +31,7 @@ func (rl *Relay) handleNormal(ctx context.Context, evt nostr.Event) (skipBroadca
// will store
// regular kinds are just saved directly
if nostr.IsRegularKind(evt.Kind) {
if evt.Kind.IsRegular() {
if nil != rl.StoreEvent {
if err := rl.StoreEvent(ctx, evt); err != nil {
switch err {

View File

@@ -18,7 +18,7 @@ type EventStoreBlobIndexWrapper struct {
func (es EventStoreBlobIndexWrapper) Keep(ctx context.Context, blob BlobDescriptor, pubkey nostr.PubKey) error {
next, stop := iter.Pull(
es.Store.QueryEvents(nostr.Filter{Authors: []nostr.PubKey{pubkey}, Kinds: []uint16{24242}, Tags: nostr.TagMap{"x": []string{blob.SHA256}}}),
es.Store.QueryEvents(nostr.Filter{Authors: []nostr.PubKey{pubkey}, Kinds: []nostr.Kind{24242}, Tags: nostr.TagMap{"x": []string{blob.SHA256}}}),
)
defer stop()
@@ -45,7 +45,7 @@ func (es EventStoreBlobIndexWrapper) List(ctx context.Context, pubkey nostr.PubK
return func(yield func(BlobDescriptor) bool) {
for evt := range es.Store.QueryEvents(nostr.Filter{
Authors: []nostr.PubKey{pubkey},
Kinds: []uint16{24242},
Kinds: []nostr.Kind{24242},
}) {
yield(es.parseEvent(evt))
}
@@ -54,7 +54,7 @@ func (es EventStoreBlobIndexWrapper) List(ctx context.Context, pubkey nostr.PubK
func (es EventStoreBlobIndexWrapper) Get(ctx context.Context, sha256 string) (*BlobDescriptor, error) {
next, stop := iter.Pull(
es.Store.QueryEvents(nostr.Filter{Tags: nostr.TagMap{"x": []string{sha256}}, Kinds: []uint16{24242}, Limit: 1}),
es.Store.QueryEvents(nostr.Filter{Tags: nostr.TagMap{"x": []string{sha256}}, Kinds: []nostr.Kind{24242}, Limit: 1}),
)
defer stop()
@@ -72,7 +72,7 @@ func (es EventStoreBlobIndexWrapper) Delete(ctx context.Context, sha256 string,
es.Store.QueryEvents(nostr.Filter{
Authors: []nostr.PubKey{pubkey},
Tags: nostr.TagMap{"x": []string{sha256}},
Kinds: []uint16{24242},
Kinds: []nostr.Kind{24242},
Limit: 1,
},
),

View File

@@ -38,7 +38,7 @@ func (rl *Relay) handleDeleteRequest(ctx context.Context, evt nostr.Event) error
identifier := spl[2]
f = nostr.Filter{
Kinds: []uint16{uint16(kind)},
Kinds: []nostr.Kind{nostr.Kind(kind)},
Authors: []nostr.PubKey{author},
Tags: nostr.TagMap{"d": []string{identifier}},
Until: &evt.CreatedAt,

View File

@@ -214,7 +214,7 @@ func (rl *Relay) HandleWebsocket(w http.ResponseWriter, r *http.Request) {
if env.Event.Kind == 5 {
// this always returns "blocked: " whenever it returns an error
writeErr = srl.handleDeleteRequest(ctx, env.Event)
} else if nostr.IsEphemeralKind(env.Event.Kind) {
} else if env.Event.Kind.IsEphemeral() {
// this will also always return a prefixed reason
writeErr = srl.handleEphemeral(ctx, env.Event)
} else {
@@ -229,7 +229,7 @@ func (rl *Relay) HandleWebsocket(w http.ResponseWriter, r *http.Request) {
n := srl.notifyListeners(env.Event)
// the number of notified listeners matters in ephemeral events
if nostr.IsEphemeralKind(env.Event.Kind) {
if env.Event.Kind.IsEphemeral() {
if n == 0 {
ok = false
reason = "mute: no one was listening for this"

View File

@@ -17,7 +17,7 @@ func FuzzRandomListenerClientRemoving(f *testing.F) {
rl := NewRelay()
f := nostr.Filter{Kinds: []uint16{1}}
f := nostr.Filter{Kinds: []nostr.Kind{1}}
cancel := func(cause error) {}
websockets := make([]*WebSocket, 0, totalWebsockets*baseSubs)
@@ -71,7 +71,7 @@ func FuzzRandomListenerIdRemoving(f *testing.F) {
rl := NewRelay()
f := nostr.Filter{Kinds: []uint16{1}}
f := nostr.Filter{Kinds: []nostr.Kind{1}}
cancel := func(cause error) {}
websockets := make([]*WebSocket, 0, totalWebsockets)
@@ -150,7 +150,7 @@ func FuzzRouterListenersPabloCrash(f *testing.F) {
rl.clients[ws] = make([]listenerSpec, 0, subIterations)
}
f := nostr.Filter{Kinds: []uint16{1}}
f := nostr.Filter{Kinds: []nostr.Kind{1}}
cancel := func(cause error) {}
type wsid struct {

View File

@@ -29,9 +29,9 @@ func TestListenerSetupAndRemoveOnce(t *testing.T) {
ws1 := &WebSocket{}
ws2 := &WebSocket{}
f1 := nostr.Filter{Kinds: []uint16{1}}
f2 := nostr.Filter{Kinds: []uint16{2}}
f3 := nostr.Filter{Kinds: []uint16{3}}
f1 := nostr.Filter{Kinds: []nostr.Kind{1}}
f2 := nostr.Filter{Kinds: []nostr.Kind{2}}
f3 := nostr.Filter{Kinds: []nostr.Kind{3}}
rl.clients[ws1] = nil
rl.clients[ws2] = nil
@@ -86,9 +86,9 @@ func TestListenerMoreConvolutedCase(t *testing.T) {
ws3 := &WebSocket{}
ws4 := &WebSocket{}
f1 := nostr.Filter{Kinds: []uint16{1}}
f2 := nostr.Filter{Kinds: []uint16{2}}
f3 := nostr.Filter{Kinds: []uint16{3}}
f1 := nostr.Filter{Kinds: []nostr.Kind{1}}
f2 := nostr.Filter{Kinds: []nostr.Kind{2}}
f3 := nostr.Filter{Kinds: []nostr.Kind{3}}
rl.clients[ws1] = nil
rl.clients[ws2] = nil
@@ -205,9 +205,9 @@ func TestListenerMoreStuffWithMultipleRelays(t *testing.T) {
ws3 := &WebSocket{}
ws4 := &WebSocket{}
f1 := nostr.Filter{Kinds: []uint16{1}}
f2 := nostr.Filter{Kinds: []uint16{2}}
f3 := nostr.Filter{Kinds: []uint16{3}}
f1 := nostr.Filter{Kinds: []nostr.Kind{1}}
f2 := nostr.Filter{Kinds: []nostr.Kind{2}}
f3 := nostr.Filter{Kinds: []nostr.Kind{3}}
rlx := NewRelay()
rly := NewRelay()
@@ -424,7 +424,7 @@ func TestListenerMoreStuffWithMultipleRelays(t *testing.T) {
func TestRandomListenerClientRemoving(t *testing.T) {
rl := NewRelay()
f := nostr.Filter{Kinds: []uint16{1}}
f := nostr.Filter{Kinds: []nostr.Kind{1}}
cancel := func(cause error) {}
websockets := make([]*WebSocket, 0, 20)
@@ -463,7 +463,7 @@ func TestRandomListenerClientRemoving(t *testing.T) {
func TestRandomListenerIdRemoving(t *testing.T) {
rl := NewRelay()
f := nostr.Filter{Kinds: []uint16{1}}
f := nostr.Filter{Kinds: []nostr.Kind{1}}
cancel := func(cause error) {}
websockets := make([]*WebSocket, 0, 20)
@@ -531,7 +531,7 @@ func TestRouterListenersPabloCrash(t *testing.T) {
rl.clients[ws2] = nil
rl.clients[ws3] = nil
f := nostr.Filter{Kinds: []uint16{1}}
f := nostr.Filter{Kinds: []nostr.Kind{1}}
cancel := func(cause error) {}
rl.addListener(ws1, ":1", rla, f, cancel)

View File

@@ -16,19 +16,19 @@ import (
//
// If ignoreKinds is given this restriction will not apply to those kinds (useful for allowing a bigger number of indexable tags for them).
// If onlyKinds is given then all other kinds will be ignored.
func PreventTooManyIndexableTags(max int, ignoreKinds []uint16, onlyKinds []uint16) func(context.Context, nostr.Event) (bool, string) {
func PreventTooManyIndexableTags(max int, ignoreKinds []nostr.Kind, onlyKinds []nostr.Kind) func(context.Context, nostr.Event) (bool, string) {
slices.Sort(ignoreKinds)
slices.Sort(onlyKinds)
ignore := func(kind uint16) bool { return false }
ignore := func(kind nostr.Kind) bool { return false }
if len(ignoreKinds) > 0 {
ignore = func(kind uint16) bool {
ignore = func(kind nostr.Kind) bool {
_, isIgnored := slices.BinarySearch(ignoreKinds, kind)
return isIgnored
}
}
if len(onlyKinds) > 0 {
ignore = func(kind uint16) bool {
ignore = func(kind nostr.Kind) bool {
_, isApplicable := slices.BinarySearch(onlyKinds, kind)
return !isApplicable
}
@@ -68,16 +68,16 @@ func PreventLargeTags(maxTagValueLen int) func(context.Context, nostr.Event) (bo
// RestrictToSpecifiedKinds returns a function that can be used as a RejectFilter that will reject
// any events with kinds different than the specified ones.
func RestrictToSpecifiedKinds(allowEphemeral bool, kinds ...uint16) func(context.Context, nostr.Event) (bool, string) {
func RestrictToSpecifiedKinds(allowEphemeral bool, kinds ...nostr.Kind) func(context.Context, nostr.Event) (bool, string) {
// sort the kinds in increasing order
slices.Sort(kinds)
return func(ctx context.Context, event nostr.Event) (reject bool, msg string) {
if allowEphemeral && nostr.IsEphemeralKind(event.Kind) {
if allowEphemeral && event.Kind.IsEphemeral() {
return false, ""
}
if _, allowed := slices.BinarySearch(kinds, uint16(event.Kind)); allowed {
if _, allowed := slices.BinarySearch(kinds, nostr.Kind(event.Kind)); allowed {
return false, ""
}
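
A quick usage sketch for the updated signature, written as if inside this package; the wrapper function is illustrative, only the RestrictToSpecifiedKinds call itself reflects the API above:

func rejectUnknownKinds(ctx context.Context, evt nostr.Event) (bool, string) {
	// in real use the policy would be built once and reused for every event
	check := RestrictToSpecifiedKinds(true, nostr.KindTextNote, nostr.KindReaction, nostr.KindDeletion)
	return check(ctx, evt) // (reject, reason)
}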

View File

@@ -60,12 +60,12 @@ func RemoveSearchQueries(ctx context.Context, filter *nostr.Filter) {
}
}
func RemoveAllButKinds(kinds ...uint16) func(context.Context, *nostr.Filter) {
func RemoveAllButKinds(kinds ...nostr.Kind) func(context.Context, *nostr.Filter) {
return func(ctx context.Context, filter *nostr.Filter) {
if n := len(filter.Kinds); n > 0 {
newKinds := make([]uint16, 0, n)
newKinds := make([]nostr.Kind, 0, n)
for i := 0; i < n; i++ {
if k := filter.Kinds[i]; slices.Contains(kinds, uint16(k)) {
if k := filter.Kinds[i]; slices.Contains(kinds, nostr.Kind(k)) {
newKinds = append(newKinds, k)
}
}

View File

@@ -37,7 +37,7 @@ func FuzzReplaceableEvents(f *testing.F) {
pk1 := nostr.GetPublicKey(sk1)
// helper to create signed events
createEvent := func(sk nostr.SecretKey, kind uint16, content string, tags nostr.Tags) nostr.Event {
createEvent := func(sk nostr.SecretKey, kind nostr.Kind, content string, tags nostr.Tags) nostr.Event {
pk := nostr.GetPublicKey(sk)
evt := nostr.Event{
PubKey: pk,
@@ -87,7 +87,7 @@ func FuzzReplaceableEvents(f *testing.F) {
// query to verify only the newest event exists
sub, err := client2.Subscribe(ctx, nostr.Filter{
Authors: []nostr.PubKey{pk1},
Kinds: []uint16{0},
Kinds: []nostr.Kind{0},
}, nostr.SubscriptionOptions{})
if err != nil {
t.Fatalf("failed to subscribe: %v", err)

View File

@@ -30,7 +30,7 @@ func TestBasicRelayFunctionality(t *testing.T) {
pk2 := nostr.GetPublicKey(sk2)
// helper to create signed events
createEvent := func(sk nostr.SecretKey, kind uint16, content string, tags nostr.Tags) nostr.Event {
createEvent := func(sk nostr.SecretKey, kind nostr.Kind, content string, tags nostr.Tags) nostr.Event {
pk := nostr.GetPublicKey(sk)
evt := nostr.Event{
PubKey: pk,
@@ -71,7 +71,7 @@ func TestBasicRelayFunctionality(t *testing.T) {
// Query the event back
sub, err := client2.Subscribe(ctx, nostr.Filter{
Authors: []nostr.PubKey{pk1},
Kinds: []uint16{1},
Kinds: []nostr.Kind{1},
}, nostr.SubscriptionOptions{})
if err != nil {
t.Fatalf("failed to subscribe: %v", err)
@@ -97,7 +97,7 @@ func TestBasicRelayFunctionality(t *testing.T) {
// Setup subscription first
sub, err := client1.Subscribe(ctx, nostr.Filter{
Authors: []nostr.PubKey{pk2},
Kinds: []uint16{1},
Kinds: []nostr.Kind{1},
}, nostr.SubscriptionOptions{})
if err != nil {
t.Fatalf("failed to subscribe: %v", err)
@@ -202,7 +202,7 @@ func TestBasicRelayFunctionality(t *testing.T) {
// query to verify only the newest event exists
sub, err := client2.Subscribe(ctx, nostr.Filter{
Authors: []nostr.PubKey{pk1},
Kinds: []uint16{0},
Kinds: []nostr.Kind{0},
}, nostr.SubscriptionOptions{})
if err != nil {
t.Fatalf("failed to subscribe: %v", err)

548 kinds.go
View File

@@ -1,152 +1,428 @@
package nostr
import "strconv"
type Kind uint16
func (kind Kind) Num() uint16 { return uint16(kind) }
func (kind Kind) String() string { return "kind::" + kind.Name() + "<" + strconv.Itoa(int(kind)) + ">" }
func (kind Kind) Name() string {
switch kind {
case KindProfileMetadata:
return "ProfileMetadata"
case KindTextNote:
return "TextNote"
case KindRecommendServer:
return "RecommendServer"
case KindFollowList:
return "FollowList"
case KindEncryptedDirectMessage:
return "EncryptedDirectMessage"
case KindDeletion:
return "Deletion"
case KindRepost:
return "Repost"
case KindReaction:
return "Reaction"
case KindBadgeAward:
return "BadgeAward"
case KindSimpleGroupChatMessage:
return "SimpleGroupChatMessage"
case KindSimpleGroupThreadedReply:
return "SimpleGroupThreadedReply"
case KindSimpleGroupThread:
return "SimpleGroupThread"
case KindSimpleGroupReply:
return "SimpleGroupReply"
case KindSeal:
return "Seal"
case KindDirectMessage:
return "DirectMessage"
case KindGenericRepost:
return "GenericRepost"
case KindReactionToWebsite:
return "ReactionToWebsite"
case KindChannelCreation:
return "ChannelCreation"
case KindChannelMetadata:
return "ChannelMetadata"
case KindChannelMessage:
return "ChannelMessage"
case KindChannelHideMessage:
return "ChannelHideMessage"
case KindChannelMuteUser:
return "ChannelMuteUser"
case KindChess:
return "Chess"
case KindMergeRequests:
return "MergeRequests"
case KindComment:
return "Comment"
case KindBid:
return "Bid"
case KindBidConfirmation:
return "BidConfirmation"
case KindOpenTimestamps:
return "OpenTimestamps"
case KindGiftWrap:
return "GiftWrap"
case KindFileMetadata:
return "FileMetadata"
case KindLiveChatMessage:
return "LiveChatMessage"
case KindPatch:
return "Patch"
case KindIssue:
return "Issue"
case KindReply:
return "Reply"
case KindStatusOpen:
return "StatusOpen"
case KindStatusApplied:
return "StatusApplied"
case KindStatusClosed:
return "StatusClosed"
case KindStatusDraft:
return "StatusDraft"
case KindProblemTracker:
return "ProblemTracker"
case KindReporting:
return "Reporting"
case KindLabel:
return "Label"
case KindRelayReviews:
return "RelayReviews"
case KindAIEmbeddings:
return "AIEmbeddings"
case KindTorrent:
return "Torrent"
case KindTorrentComment:
return "TorrentComment"
case KindCoinjoinPool:
return "CoinjoinPool"
case KindCommunityPostApproval:
return "CommunityPostApproval"
case KindJobFeedback:
return "JobFeedback"
case KindSimpleGroupPutUser:
return "SimpleGroupPutUser"
case KindSimpleGroupRemoveUser:
return "SimpleGroupRemoveUser"
case KindSimpleGroupEditMetadata:
return "SimpleGroupEditMetadata"
case KindSimpleGroupDeleteEvent:
return "SimpleGroupDeleteEvent"
case KindSimpleGroupCreateGroup:
return "SimpleGroupCreateGroup"
case KindSimpleGroupDeleteGroup:
return "SimpleGroupDeleteGroup"
case KindSimpleGroupCreateInvite:
return "SimpleGroupCreateInvite"
case KindSimpleGroupJoinRequest:
return "SimpleGroupJoinRequest"
case KindSimpleGroupLeaveRequest:
return "SimpleGroupLeaveRequest"
case KindZapGoal:
return "ZapGoal"
case KindNutZap:
return "NutZap"
case KindTidalLogin:
return "TidalLogin"
case KindZapRequest:
return "ZapRequest"
case KindZap:
return "Zap"
case KindHighlights:
return "Highlights"
case KindMuteList:
return "MuteList"
case KindPinList:
return "PinList"
case KindRelayListMetadata:
return "RelayListMetadata"
case KindBookmarkList:
return "BookmarkList"
case KindCommunityList:
return "CommunityList"
case KindPublicChatList:
return "PublicChatList"
case KindBlockedRelayList:
return "BlockedRelayList"
case KindSearchRelayList:
return "SearchRelayList"
case KindSimpleGroupList:
return "SimpleGroupList"
case KindInterestList:
return "InterestList"
case KindNutZapInfo:
return "NutZapInfo"
case KindEmojiList:
return "EmojiList"
case KindDMRelayList:
return "DMRelayList"
case KindUserServerList:
return "UserServerList"
case KindFileStorageServerList:
return "FileStorageServerList"
case KindGoodWikiAuthorList:
return "GoodWikiAuthorList"
case KindGoodWikiRelayList:
return "GoodWikiRelayList"
case KindNWCWalletInfo:
return "NWCWalletInfo"
case KindLightningPubRPC:
return "LightningPubRPC"
case KindClientAuthentication:
return "ClientAuthentication"
case KindNWCWalletRequest:
return "NWCWalletRequest"
case KindNWCWalletResponse:
return "NWCWalletResponse"
case KindNostrConnect:
return "NostrConnect"
case KindBlobs:
return "Blobs"
case KindHTTPAuth:
return "HTTPAuth"
case KindCategorizedPeopleList:
return "CategorizedPeopleList"
case KindCategorizedBookmarksList:
return "CategorizedBookmarksList"
case KindRelaySets:
return "RelaySets"
case KindBookmarkSets:
return "BookmarkSets"
case KindCuratedSets:
return "CuratedSets"
case KindCuratedVideoSets:
return "CuratedVideoSets"
case KindMuteSets:
return "MuteSets"
case KindProfileBadges:
return "ProfileBadges"
case KindBadgeDefinition:
return "BadgeDefinition"
case KindInterestSets:
return "InterestSets"
case KindStallDefinition:
return "StallDefinition"
case KindProductDefinition:
return "ProductDefinition"
case KindMarketplaceUI:
return "MarketplaceUI"
case KindProductSoldAsAuction:
return "ProductSoldAsAuction"
case KindArticle:
return "Article"
case KindDraftArticle:
return "DraftArticle"
case KindEmojiSets:
return "EmojiSets"
case KindModularArticleHeader:
return "ModularArticleHeader"
case KindModularArticleContent:
return "ModularArticleContent"
case KindReleaseArtifactSets:
return "ReleaseArtifactSets"
case KindApplicationSpecificData:
return "ApplicationSpecificData"
case KindLiveEvent:
return "LiveEvent"
case KindUserStatuses:
return "UserStatuses"
case KindClassifiedListing:
return "ClassifiedListing"
case KindDraftClassifiedListing:
return "DraftClassifiedListing"
case KindRepositoryAnnouncement:
return "RepositoryAnnouncement"
case KindRepositoryState:
return "RepositoryState"
case KindSimpleGroupMetadata:
return "SimpleGroupMetadata"
case KindSimpleGroupAdmins:
return "SimpleGroupAdmins"
case KindSimpleGroupMembers:
return "SimpleGroupMembers"
case KindSimpleGroupRoles:
return "SimpleGroupRoles"
case KindWikiArticle:
return "WikiArticle"
case KindRedirects:
return "Redirects"
case KindFeed:
return "Feed"
case KindDateCalendarEvent:
return "DateCalendarEvent"
case KindTimeCalendarEvent:
return "TimeCalendarEvent"
case KindCalendar:
return "Calendar"
case KindCalendarEventRSVP:
return "CalendarEventRSVP"
case KindHandlerRecommendation:
return "HandlerRecommendation"
case KindHandlerInformation:
return "HandlerInformation"
case KindVideoEvent:
return "VideoEvent"
case KindShortVideoEvent:
return "ShortVideoEvent"
case KindVideoViewEvent:
return "VideoViewEvent"
case KindCommunityDefinition:
return "CommunityDefinition"
}
return "unknown"
}
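
The classification methods called throughout this commit (IsRegular, IsReplaceable, IsEphemeral, IsAddressable) are not visible in these hunks; presumably they follow the same NIP-01 ranges as the old free functions they replace. A hedged sketch of how they would read on the new type, plus the output format of the methods defined above:

// assumed ranges, mirroring the old IsRegularKind/IsReplaceableKind/IsEphemeralKind/
// IsAddressableKind helpers; the real method bodies are outside this diff
func (kind Kind) IsRegular() bool     { return kind < 10000 && kind != 0 && kind != 3 }
func (kind Kind) IsReplaceable() bool { return kind == 0 || kind == 3 || (10000 <= kind && kind < 20000) }
func (kind Kind) IsEphemeral() bool   { return 20000 <= kind && kind < 30000 }
func (kind Kind) IsAddressable() bool { return 30000 <= kind && kind < 40000 }

// and the formatting methods above produce, for example:
//   KindArticle.Num()    == uint16(30023)
//   KindArticle.Name()   == "Article"
//   KindArticle.String() == "kind::Article<30023>"
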
const (
KindProfileMetadata uint16 = 0
KindTextNote uint16 = 1
KindRecommendServer uint16 = 2
KindFollowList uint16 = 3
KindEncryptedDirectMessage uint16 = 4
KindDeletion uint16 = 5
KindRepost uint16 = 6
KindReaction uint16 = 7
KindBadgeAward uint16 = 8
KindSimpleGroupChatMessage uint16 = 9
KindSimpleGroupThreadedReply uint16 = 10
KindSimpleGroupThread uint16 = 11
KindSimpleGroupReply uint16 = 12
KindSeal uint16 = 13
KindDirectMessage uint16 = 14
KindGenericRepost uint16 = 16
KindReactionToWebsite uint16 = 17
KindChannelCreation uint16 = 40
KindChannelMetadata uint16 = 41
KindChannelMessage uint16 = 42
KindChannelHideMessage uint16 = 43
KindChannelMuteUser uint16 = 44
KindChess uint16 = 64
KindMergeRequests uint16 = 818
KindComment uint16 = 1111
KindBid uint16 = 1021
KindBidConfirmation uint16 = 1022
KindOpenTimestamps uint16 = 1040
KindGiftWrap uint16 = 1059
KindFileMetadata uint16 = 1063
KindLiveChatMessage uint16 = 1311
KindPatch uint16 = 1617
KindIssue uint16 = 1621
KindReply uint16 = 1622
KindStatusOpen uint16 = 1630
KindStatusApplied uint16 = 1631
KindStatusClosed uint16 = 1632
KindStatusDraft uint16 = 1633
KindProblemTracker uint16 = 1971
KindReporting uint16 = 1984
KindLabel uint16 = 1985
KindRelayReviews uint16 = 1986
KindAIEmbeddings uint16 = 1987
KindTorrent uint16 = 2003
KindTorrentComment uint16 = 2004
KindCoinjoinPool uint16 = 2022
KindCommunityPostApproval uint16 = 4550
KindJobFeedback uint16 = 7000
KindSimpleGroupPutUser uint16 = 9000
KindSimpleGroupRemoveUser uint16 = 9001
KindSimpleGroupEditMetadata uint16 = 9002
KindSimpleGroupDeleteEvent uint16 = 9005
KindSimpleGroupCreateGroup uint16 = 9007
KindSimpleGroupDeleteGroup uint16 = 9008
KindSimpleGroupCreateInvite uint16 = 9009
KindSimpleGroupJoinRequest uint16 = 9021
KindSimpleGroupLeaveRequest uint16 = 9022
KindZapGoal uint16 = 9041
KindNutZap uint16 = 9321
KindTidalLogin uint16 = 9467
KindZapRequest uint16 = 9734
KindZap uint16 = 9735
KindHighlights uint16 = 9802
KindMuteList uint16 = 10000
KindPinList uint16 = 10001
KindRelayListMetadata uint16 = 10002
KindBookmarkList uint16 = 10003
KindCommunityList uint16 = 10004
KindPublicChatList uint16 = 10005
KindBlockedRelayList uint16 = 10006
KindSearchRelayList uint16 = 10007
KindSimpleGroupList uint16 = 10009
KindInterestList uint16 = 10015
KindNutZapInfo uint16 = 10019
KindEmojiList uint16 = 10030
KindDMRelayList uint16 = 10050
KindUserServerList uint16 = 10063
KindFileStorageServerList uint16 = 10096
KindGoodWikiAuthorList uint16 = 10101
KindGoodWikiRelayList uint16 = 10102
KindNWCWalletInfo uint16 = 13194
KindLightningPubRPC uint16 = 21000
KindClientAuthentication uint16 = 22242
KindNWCWalletRequest uint16 = 23194
KindNWCWalletResponse uint16 = 23195
KindNostrConnect uint16 = 24133
KindBlobs uint16 = 24242
KindHTTPAuth uint16 = 27235
KindCategorizedPeopleList uint16 = 30000
KindCategorizedBookmarksList uint16 = 30001
KindRelaySets uint16 = 30002
KindBookmarkSets uint16 = 30003
KindCuratedSets uint16 = 30004
KindCuratedVideoSets uint16 = 30005
KindMuteSets uint16 = 30007
KindProfileBadges uint16 = 30008
KindBadgeDefinition uint16 = 30009
KindInterestSets uint16 = 30015
KindStallDefinition uint16 = 30017
KindProductDefinition uint16 = 30018
KindMarketplaceUI uint16 = 30019
KindProductSoldAsAuction uint16 = 30020
KindArticle uint16 = 30023
KindDraftArticle uint16 = 30024
KindEmojiSets uint16 = 30030
KindModularArticleHeader uint16 = 30040
KindModularArticleContent uint16 = 30041
KindReleaseArtifactSets uint16 = 30063
KindApplicationSpecificData uint16 = 30078
KindLiveEvent uint16 = 30311
KindUserStatuses uint16 = 30315
KindClassifiedListing uint16 = 30402
KindDraftClassifiedListing uint16 = 30403
KindRepositoryAnnouncement uint16 = 30617
KindRepositoryState uint16 = 30618
KindSimpleGroupMetadata uint16 = 39000
KindSimpleGroupAdmins uint16 = 39001
KindSimpleGroupMembers uint16 = 39002
KindSimpleGroupRoles uint16 = 39003
KindWikiArticle uint16 = 30818
KindRedirects uint16 = 30819
KindFeed uint16 = 31890
KindDateCalendarEvent uint16 = 31922
KindTimeCalendarEvent uint16 = 31923
KindCalendar uint16 = 31924
KindCalendarEventRSVP uint16 = 31925
KindHandlerRecommendation uint16 = 31989
KindHandlerInformation uint16 = 31990
KindVideoEvent uint16 = 34235
KindShortVideoEvent uint16 = 34236
KindVideoViewEvent uint16 = 34237
KindCommunityDefinition uint16 = 34550
KindProfileMetadata Kind = 0
KindTextNote Kind = 1
KindRecommendServer Kind = 2
KindFollowList Kind = 3
KindEncryptedDirectMessage Kind = 4
KindDeletion Kind = 5
KindRepost Kind = 6
KindReaction Kind = 7
KindBadgeAward Kind = 8
KindSimpleGroupChatMessage Kind = 9
KindSimpleGroupThreadedReply Kind = 10
KindSimpleGroupThread Kind = 11
KindSimpleGroupReply Kind = 12
KindSeal Kind = 13
KindDirectMessage Kind = 14
KindGenericRepost Kind = 16
KindReactionToWebsite Kind = 17
KindChannelCreation Kind = 40
KindChannelMetadata Kind = 41
KindChannelMessage Kind = 42
KindChannelHideMessage Kind = 43
KindChannelMuteUser Kind = 44
KindChess Kind = 64
KindMergeRequests Kind = 818
KindComment Kind = 1111
KindBid Kind = 1021
KindBidConfirmation Kind = 1022
KindOpenTimestamps Kind = 1040
KindGiftWrap Kind = 1059
KindFileMetadata Kind = 1063
KindLiveChatMessage Kind = 1311
KindPatch Kind = 1617
KindIssue Kind = 1621
KindReply Kind = 1622
KindStatusOpen Kind = 1630
KindStatusApplied Kind = 1631
KindStatusClosed Kind = 1632
KindStatusDraft Kind = 1633
KindProblemTracker Kind = 1971
KindReporting Kind = 1984
KindLabel Kind = 1985
KindRelayReviews Kind = 1986
KindAIEmbeddings Kind = 1987
KindTorrent Kind = 2003
KindTorrentComment Kind = 2004
KindCoinjoinPool Kind = 2022
KindCommunityPostApproval Kind = 4550
KindJobFeedback Kind = 7000
KindSimpleGroupPutUser Kind = 9000
KindSimpleGroupRemoveUser Kind = 9001
KindSimpleGroupEditMetadata Kind = 9002
KindSimpleGroupDeleteEvent Kind = 9005
KindSimpleGroupCreateGroup Kind = 9007
KindSimpleGroupDeleteGroup Kind = 9008
KindSimpleGroupCreateInvite Kind = 9009
KindSimpleGroupJoinRequest Kind = 9021
KindSimpleGroupLeaveRequest Kind = 9022
KindZapGoal Kind = 9041
KindNutZap Kind = 9321
KindTidalLogin Kind = 9467
KindZapRequest Kind = 9734
KindZap Kind = 9735
KindHighlights Kind = 9802
KindMuteList Kind = 10000
KindPinList Kind = 10001
KindRelayListMetadata Kind = 10002
KindBookmarkList Kind = 10003
KindCommunityList Kind = 10004
KindPublicChatList Kind = 10005
KindBlockedRelayList Kind = 10006
KindSearchRelayList Kind = 10007
KindSimpleGroupList Kind = 10009
KindInterestList Kind = 10015
KindNutZapInfo Kind = 10019
KindEmojiList Kind = 10030
KindDMRelayList Kind = 10050
KindUserServerList Kind = 10063
KindFileStorageServerList Kind = 10096
KindGoodWikiAuthorList Kind = 10101
KindGoodWikiRelayList Kind = 10102
KindNWCWalletInfo Kind = 13194
KindLightningPubRPC Kind = 21000
KindClientAuthentication Kind = 22242
KindNWCWalletRequest Kind = 23194
KindNWCWalletResponse Kind = 23195
KindNostrConnect Kind = 24133
KindBlobs Kind = 24242
KindHTTPAuth Kind = 27235
KindCategorizedPeopleList Kind = 30000
KindCategorizedBookmarksList Kind = 30001
KindRelaySets Kind = 30002
KindBookmarkSets Kind = 30003
KindCuratedSets Kind = 30004
KindCuratedVideoSets Kind = 30005
KindMuteSets Kind = 30007
KindProfileBadges Kind = 30008
KindBadgeDefinition Kind = 30009
KindInterestSets Kind = 30015
KindStallDefinition Kind = 30017
KindProductDefinition Kind = 30018
KindMarketplaceUI Kind = 30019
KindProductSoldAsAuction Kind = 30020
KindArticle Kind = 30023
KindDraftArticle Kind = 30024
KindEmojiSets Kind = 30030
KindModularArticleHeader Kind = 30040
KindModularArticleContent Kind = 30041
KindReleaseArtifactSets Kind = 30063
KindApplicationSpecificData Kind = 30078
KindLiveEvent Kind = 30311
KindUserStatuses Kind = 30315
KindClassifiedListing Kind = 30402
KindDraftClassifiedListing Kind = 30403
KindRepositoryAnnouncement Kind = 30617
KindRepositoryState Kind = 30618
KindSimpleGroupMetadata Kind = 39000
KindSimpleGroupAdmins Kind = 39001
KindSimpleGroupMembers Kind = 39002
KindSimpleGroupRoles Kind = 39003
KindWikiArticle Kind = 30818
KindRedirects Kind = 30819
KindFeed Kind = 31890
KindDateCalendarEvent Kind = 31922
KindTimeCalendarEvent Kind = 31923
KindCalendar Kind = 31924
KindCalendarEventRSVP Kind = 31925
KindHandlerRecommendation Kind = 31989
KindHandlerInformation Kind = 31990
KindVideoEvent Kind = 34235
KindShortVideoEvent Kind = 34236
KindVideoViewEvent Kind = 34237
KindCommunityDefinition Kind = 34550
)
func IsRegularKind(kind uint16) bool {
func (kind Kind) IsRegular() bool {
return kind < 10000 && kind != 0 && kind != 3
}
func IsReplaceableKind(kind uint16) bool {
func (kind Kind) IsReplaceable() bool {
return kind == 0 || kind == 3 || (10000 <= kind && kind < 20000)
}
func IsEphemeralKind(kind uint16) bool {
func (kind Kind) IsEphemeral() bool {
return 20000 <= kind && kind < 30000
}
func IsAddressableKind(kind uint16) bool {
func (kind Kind) IsAddressable() bool {
return 30000 <= kind && kind < 40000
}
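
As a quick illustration of the new classification methods (a sketch, not part of the commit; describe is a hypothetical helper and the import path is the fiatjaf.com/nostr module used elsewhere in this diff):

package main

import (
	"fmt"

	"fiatjaf.com/nostr"
)

// describe maps a kind onto the storage class implied by the methods above.
func describe(kind nostr.Kind) string {
	switch {
	case kind.IsEphemeral():
		return "ephemeral"
	case kind.IsAddressable():
		return "addressable"
	case kind.IsReplaceable():
		return "replaceable"
	case kind.IsRegular():
		return "regular"
	default:
		return "unclassified"
	}
}

func main() {
	for _, k := range []nostr.Kind{nostr.KindTextNote, nostr.KindFollowList, nostr.KindClientAuthentication, nostr.KindArticle} {
		fmt.Println(k, "->", describe(k))
	}
}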

View File

@@ -1,19 +0,0 @@
package nostr
import (
"testing"
"github.com/stretchr/testify/require"
)
func KindKindTest(t *testing.T) {
require.True(t, IsRegularKind(1))
require.True(t, IsRegularKind(9))
require.True(t, IsRegularKind(1111))
require.True(t, IsReplaceableKind(0))
require.True(t, IsReplaceableKind(3))
require.True(t, IsReplaceableKind(10002))
require.True(t, IsReplaceableKind(10050))
require.True(t, IsAddressableKind(30023))
require.True(t, IsAddressableKind(39000))
}

View File

@@ -12,7 +12,7 @@ import (
func GetDMRelays(ctx context.Context, pubkey nostr.PubKey, pool *nostr.Pool, relaysToQuery []string) []string {
ie := pool.QuerySingle(ctx, relaysToQuery, nostr.Filter{
Authors: []nostr.PubKey{pubkey},
Kinds: []uint16{nostr.KindDMRelayList},
Kinds: []nostr.Kind{nostr.KindDMRelayList},
}, nostr.SubscriptionOptions{Label: "dm-relays"})
if ie == nil {
return nil
@@ -154,7 +154,7 @@ func ListenForMessages(
}
for ie := range pool.SubscribeMany(ctx, ourRelays, nostr.Filter{
Kinds: []uint16{nostr.KindGiftWrap},
Kinds: []nostr.Kind{nostr.KindGiftWrap},
Tags: nostr.TagMap{"p": []string{pk.Hex()}},
Since: &since,
}, nostr.SubscriptionOptions{Label: "mydms"}) {

View File

@@ -95,7 +95,7 @@ func Decode(bech32string string) (prefix string, value any, err error) {
if len(v) != 4 {
return prefix, nil, fmt.Errorf("invalid uint32 value for integer (%v)", v)
}
result.Kind = uint16(binary.BigEndian.Uint32(v))
result.Kind = nostr.Kind(binary.BigEndian.Uint32(v))
default:
// ignore
}
@@ -127,7 +127,7 @@ func Decode(bech32string string) (prefix string, value any, err error) {
}
result.PublicKey = nostr.PubKey(v)
case TLVKind:
result.Kind = uint16(binary.BigEndian.Uint32(v))
result.Kind = nostr.Kind(binary.BigEndian.Uint32(v))
default:
// ignore
}
@@ -185,7 +185,7 @@ func EncodeNevent(id nostr.ID, relays []string, author nostr.PubKey) string {
return nevent
}
func EncodeNaddr(pk nostr.PubKey, kind uint16, identifier string, relays []string) string {
func EncodeNaddr(pk nostr.PubKey, kind nostr.Kind, identifier string, relays []string) string {
buf := &bytes.Buffer{}
writeTLVEntry(buf, TLVDefault, []byte(identifier))
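
A hedged usage sketch for the updated EncodeNaddr signature; the nip19 package name and import path, as well as the zero-valued key, are assumptions for illustration, and the concrete type behind Decode's value result is not shown in this hunk:

package main

import (
	"fmt"

	"fiatjaf.com/nostr"
	"fiatjaf.com/nostr/nip19" // assumed import path for this file's package
)

func main() {
	var pk nostr.PubKey // zero key, just for illustration
	naddr := nip19.EncodeNaddr(pk, nostr.KindArticle, "my-first-post", []string{"wss://relay.example.com"})

	// round-trip: the decoded pointer should carry Kind = nostr.KindArticle (30023)
	prefix, value, err := nip19.Decode(naddr)
	fmt.Println(prefix, value, err)
}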

View File

@@ -11,7 +11,7 @@ type Role struct {
Description string
}
type KindRange []uint16
type KindRange []nostr.Kind
var ModerationEventKinds = KindRange{
nostr.KindSimpleGroupPutUser,
@@ -30,7 +30,7 @@ var MetadataEventKinds = KindRange{
nostr.KindSimpleGroupRoles,
}
func (kr KindRange) Includes(kind uint16) bool {
func (kr KindRange) Includes(kind nostr.Kind) bool {
_, ok := slices.BinarySearch(kr, kind)
return ok
}

View File

@@ -112,7 +112,7 @@ func NewBunker(
now := nostr.Now()
events := pool.SubscribeMany(ctx, relays, nostr.Filter{
Tags: nostr.TagMap{"p": []string{clientPublicKey.Hex()}},
Kinds: []uint16{nostr.KindNostrConnect},
Kinds: []nostr.Kind{nostr.KindNostrConnect},
Since: &now,
LimitZero: true,
}, nostr.SubscriptionOptions{

View File

@@ -77,7 +77,7 @@ func loadWalletFromPool(
return nil
}
kinds := []uint16{17375, 7375}
kinds := []nostr.Kind{17375, 7375}
if withHistory {
kinds = append(kinds, 7376)
}
@@ -95,7 +95,7 @@ func loadWalletFromPool(
deletions := pool.SubscribeManyNotifyEOSE(
ctx,
relays,
nostr.Filter{Kinds: []uint16{5}, Tags: nostr.TagMap{"k": []string{"7375"}}, Authors: []nostr.PubKey{pk}},
nostr.Filter{Kinds: []nostr.Kind{5}, Tags: nostr.TagMap{"k": []string{"7375"}}, Authors: []nostr.PubKey{pk}},
eoseChanD,
nostr.SubscriptionOptions{},
)

View File

@@ -27,7 +27,7 @@ func SendNutzap(
amount uint64,
message string,
) (chan nostr.PublishResult, error) {
ie := pool.QuerySingle(ctx, relays, nostr.Filter{Kinds: []uint16{10019}, Authors: []nostr.PubKey{targetUserPublickey}}, nostr.SubscriptionOptions{})
ie := pool.QuerySingle(ctx, relays, nostr.Filter{Kinds: []nostr.Kind{10019}, Authors: []nostr.PubKey{targetUserPublickey}}, nostr.SubscriptionOptions{})
if ie == nil {
return nil, NutzapsNotAccepted
}

View File

@@ -70,7 +70,7 @@ type EventPointer struct {
ID ID `json:"id"`
Relays []string `json:"relays,omitempty"`
Author PubKey `json:"author,omitempty"`
Kind uint16 `json:"kind,omitempty"`
Kind Kind `json:"kind,omitempty"`
}
// EventPointerFromTag creates an EventPointer from an "e" tag (the tag name isn't checked, so it could be anything).

@@ -115,7 +115,7 @@ func (ep EventPointer) AsTag() Tag {
// EntityPointer represents a pointer to a nostr entity (addressable event).
type EntityPointer struct {
PublicKey PubKey `json:"pubkey"`
Kind uint16 `json:"kind,omitempty"`
Kind Kind `json:"kind,omitempty"`
Identifier string `json:"identifier,omitempty"`
Relays []string `json:"relays,omitempty"`
}
@@ -138,7 +138,7 @@ func EntityPointerFromTag(refTag Tag) (EntityPointer, error) {
}
pointer := EntityPointer{
Kind: uint16(kind),
Kind: Kind(kind),
PublicKey: pk,
Identifier: spl[2],
}
@@ -164,7 +164,7 @@ func (ep EntityPointer) AsTagReference() string {
func (ep EntityPointer) AsFilter() Filter {
return Filter{
Kinds: []uint16{ep.Kind},
Kinds: []Kind{ep.Kind},
Authors: []PubKey{ep.PublicKey},
Tags: TagMap{"d": []string{ep.Identifier}},
}

View File

@@ -29,7 +29,7 @@ type Pool struct {
eventMiddleware func(RelayEvent)
duplicateMiddleware func(relay string, id ID)
queryMiddleware func(relay string, pubkey PubKey, kind uint16)
queryMiddleware func(relay string, pubkey PubKey, kind Kind)
relayOptions RelayOptions
// custom things not often used
@@ -88,7 +88,7 @@ type PoolOptions struct {
// AuthorKindQueryMiddleware is a function that will be called with every combination of
// relay+pubkey+kind queried in a .SubscribeMany*() call -- when applicable (i.e. when the query
// contains a pubkey and a kind).
AuthorKindQueryMiddleware func(relay string, pubkey PubKey, kind uint16)
AuthorKindQueryMiddleware func(relay string, pubkey PubKey, kind Kind)
// RelayOptions are any options that should be passed to Relays instantiated by this pool
RelayOptions RelayOptions
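
A sketch of wiring a middleware that matches the new signature; how the options reach a Pool (the constructor) is outside this hunk, so only the options value is built here, and the log line is illustrative:

package main

import (
	"log"

	"fiatjaf.com/nostr"
)

func main() {
	opts := nostr.PoolOptions{
		AuthorKindQueryMiddleware: func(relay string, pubkey nostr.PubKey, kind nostr.Kind) {
			// invoked for each relay+pubkey+kind combination queried, per the comment above
			log.Printf("queried %s for kind %d of %s", relay, kind, pubkey.Hex())
		},
	}
	_ = opts // pass opts wherever the Pool is constructed
}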

View File

@@ -366,10 +366,10 @@ func (r *Relay) publish(ctx context.Context, id ID, env Envelope) error {
if gotOk {
return err
}
return ctx.Err()
return fmt.Errorf("publish: %w", context.Cause(ctx))
case <-r.connectionContext.Done():
// this happens when we lose connectivity
return err
return fmt.Errorf("relay: %w", context.Cause(r.connectionContext))
}
}
}
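
Since both branches now wrap the cause with %w, callers can still branch on the underlying cancellation; a sketch assuming only the Publish(ctx, event) call shape seen in the tests below (publishAndClassify is a hypothetical helper):

package main

import (
	"context"
	"errors"
	"log"

	"fiatjaf.com/nostr"
)

func publishAndClassify(ctx context.Context, rl *nostr.Relay, evt nostr.Event) {
	err := rl.Publish(ctx, evt)
	switch {
	case err == nil:
		log.Println("accepted")
	case errors.Is(err, context.Canceled), errors.Is(err, context.DeadlineExceeded):
		log.Println("gave up:", err) // the publish or connection context ended
	default:
		log.Println("rejected or failed:", err) // e.g. "msg: blocked"
	}
}

func main() {}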

View File

@@ -46,7 +46,7 @@ func TestPublish(t *testing.T) {
require.True(t, bytes.Equal(event.Serialize(), textNote.Serialize()))
// send back an ok nip-20 command result
res := []any{"OK", textNote.ID, true, ""}
res := []any{"OK", textNote.ID.Hex(), true, ""}
err = websocket.JSON.Send(conn, res)
require.NoError(t, err)
})
@@ -54,7 +54,7 @@ func TestPublish(t *testing.T) {
// connect a client and send the text note
rl := mustRelayConnect(t, ws.URL)
err = rl.Publish(context.Background(), textNote)
err = rl.Publish(t.Context(), textNote)
require.NoError(t, err)
require.True(t, published, "fake relay server saw no event")
@@ -73,15 +73,16 @@ func TestPublishBlocked(t *testing.T) {
require.NoError(t, err)
// send back a not ok nip-20 command result
res := []any{"OK", textNote.ID.String(), false, "blocked"}
res := []any{"OK", textNote.ID.Hex(), false, "blocked"}
websocket.JSON.Send(conn, res)
})
defer ws.Close()
// connect a client and send a text note
rl := mustRelayConnect(t, ws.URL)
err := rl.Publish(context.Background(), textNote)
err := rl.Publish(t.Context(), textNote)
require.Error(t, err)
require.EqualError(t, err, "msg: blocked")
}
func TestPublishWriteFailed(t *testing.T) {
@@ -100,7 +101,7 @@ func TestPublishWriteFailed(t *testing.T) {
rl := mustRelayConnect(t, ws.URL)
// Force a brief delay so that the publish always fails on the closed socket.
time.Sleep(1 * time.Millisecond)
err := rl.Publish(context.Background(), textNote)
err := rl.Publish(t.Context(), textNote)
require.Error(t, err)
}
@@ -117,7 +118,7 @@ func TestConnectContext(t *testing.T) {
defer ws.Close()
// relay client
ctx, cancel := context.WithTimeout(context.Background(), 3*time.Second)
ctx, cancel := context.WithTimeout(t.Context(), 3*time.Second)
defer cancel()
r, err := RelayConnect(ctx, ws.URL, RelayOptions{})
require.NoError(t, err)
@@ -137,7 +138,7 @@ func TestConnectContextCanceled(t *testing.T) {
defer ws.Close()
// relay client
ctx, cancel := context.WithCancel(context.Background())
ctx, cancel := context.WithCancel(t.Context())
cancel() // make ctx expired
_, err := RelayConnect(ctx, ws.URL, RelayOptions{})
require.ErrorIs(t, err, context.Canceled)
@@ -169,7 +170,7 @@ func makeKeyPair(t *testing.T) (priv, pub [32]byte) {
func mustRelayConnect(t *testing.T, url string) *Relay {
t.Helper()
rl, err := RelayConnect(context.Background(), url, RelayOptions{})
rl, err := RelayConnect(t.Context(), url, RelayOptions{})
require.NoError(t, err)
return rl

View File

@@ -28,7 +28,7 @@ func (sys *System) initializeAddressableDataloaders() {
sys.addressableLoaders[kind_30030] = sys.createAddressableDataloader(30030)
}
func (sys *System) createAddressableDataloader(kind uint16) *dataloader.Loader[nostr.PubKey, []nostr.Event] {
func (sys *System) createAddressableDataloader(kind nostr.Kind) *dataloader.Loader[nostr.PubKey, []nostr.Event] {
return dataloader.NewBatchedLoader(
func(ctxs []context.Context, pubkeys []nostr.PubKey) map[nostr.PubKey]dataloader.Result[[]nostr.Event] {
return sys.batchLoadAddressableEvents(ctxs, kind, pubkeys)
@@ -42,7 +42,7 @@ func (sys *System) createAddressableDataloader(kind uint16) *dataloader.Loader[n
func (sys *System) batchLoadAddressableEvents(
ctxs []context.Context,
kind uint16,
kind nostr.Kind,
pubkeys []nostr.PubKey,
) map[nostr.PubKey]dataloader.Result[[]nostr.Event] {
batchSize := len(pubkeys)
@@ -77,7 +77,7 @@ func (sys *System) batchLoadAddressableEvents(
dfilter = nostr.DirectedFilter{
Relay: relay,
Filter: nostr.Filter{
Kinds: []uint16{kind},
Kinds: []nostr.Kind{kind},
Authors: make([]nostr.PubKey, 0, batchSize-i /* this and all pubkeys after this can be added */),
},
}

View File

@@ -28,7 +28,7 @@ func makePubkeyStreamKey(prefix byte, pubkey nostr.PubKey) []byte {
func (sys *System) StreamLiveFeed(
ctx context.Context,
pubkeys []nostr.PubKey,
kinds []uint16,
kinds []nostr.Kind,
) (<-chan nostr.Event, error) {
events := make(chan nostr.Event)
@@ -102,7 +102,7 @@ func (sys *System) StreamLiveFeed(
func (sys *System) FetchFeedPage(
ctx context.Context,
pubkeys []nostr.PubKey,
kinds []uint16,
kinds []nostr.Kind,
until nostr.Timestamp,
totalLimit int,
) ([]nostr.Event, error) {

View File

@@ -128,7 +128,7 @@ func TestStreamLiveFeed(t *testing.T) {
go sys.Pool.PublishMany(ctx, []string{"ws://localhost:48482", "ws://localhost:48483"}, evt2)
// start streaming events for both pubkeys
events, err := sys.StreamLiveFeed(ctx, []nostr.PubKey{pk1, pk2}, []uint16{1})
events, err := sys.StreamLiveFeed(ctx, []nostr.PubKey{pk1, pk2}, []nostr.Kind{1})
if err != nil {
t.Fatalf("failed to start streaming: %v", err)
}

View File

@@ -30,7 +30,7 @@ func fetchGenericList[V comparable, I TagItemWithValue[V]](
sys *System,
ctx context.Context,
pubkey nostr.PubKey,
actualKind uint16,
actualKind nostr.Kind,
replaceableIndex replaceableIndex,
parseTag func(nostr.Tag) (I, bool),
cache cache.Cache32[GenericList[V, I]],
@@ -39,7 +39,7 @@ func fetchGenericList[V comparable, I TagItemWithValue[V]](
// call that will do it only once, the subsequent ones will wait for a result to be cached
// and then return it from cache -- 7 is an arbitrary index into the pubkey
n := pubkey[7]
lockIdx := (uint16(n) + actualKind) % 60
lockIdx := (nostr.Kind(n) + actualKind) % 60
genericListMutexes[lockIdx].Lock()
if valueWasJustCached[lockIdx] {
@@ -57,7 +57,7 @@ func fetchGenericList[V comparable, I TagItemWithValue[V]](
v := GenericList[V, I]{PubKey: pubkey}
for evt := range sys.Store.QueryEvents(nostr.Filter{Kinds: []uint16{actualKind}, Authors: []nostr.PubKey{pubkey}}) {
for evt := range sys.Store.QueryEvents(nostr.Filter{Kinds: []nostr.Kind{actualKind}, Authors: []nostr.PubKey{pubkey}}) {
// ok, we found something locally
items := parseItemsFromEventTags(evt, parseTag)
v.Event = &evt
@@ -138,7 +138,7 @@ func parseItemsFromEventTags[V comparable, I TagItemWithValue[V]](
return result
}
func getLocalStoreRefreshDaysForKind(kind uint16) nostr.Timestamp {
func getLocalStoreRefreshDaysForKind(kind nostr.Kind) nostr.Timestamp {
switch kind {
case 0:
return 7

View File

@@ -8,7 +8,7 @@ import (
var kvStoreLastFetchPrefix = byte('f')
func makeLastFetchKey(kind uint16, pubkey nostr.PubKey) []byte {
func makeLastFetchKey(kind nostr.Kind, pubkey nostr.PubKey) []byte {
buf := make([]byte, 1+5+32)
buf[0] = kvStoreLastFetchPrefix
binary.LittleEndian.PutUint32(buf[1:], uint32(kind))

View File

@@ -110,7 +110,7 @@ func (sys *System) FetchProfileMetadata(ctx context.Context, pubkey nostr.PubKey
pm.PubKey = pubkey
for evt := range sys.Store.QueryEvents(nostr.Filter{Kinds: []uint16{0}, Authors: []nostr.PubKey{pubkey}}) {
for evt := range sys.Store.QueryEvents(nostr.Filter{Kinds: []nostr.Kind{0}, Authors: []nostr.PubKey{pubkey}}) {
// ok, we found something locally
pm, _ = ParseMetadata(evt)
pm.PubKey = pubkey

View File

@@ -48,7 +48,7 @@ func (sys *System) initializeReplaceableDataloaders() {
sys.replaceableLoaders[kind_10030] = sys.createReplaceableDataloader(10030)
}
func (sys *System) createReplaceableDataloader(kind uint16) *dataloader.Loader[nostr.PubKey, nostr.Event] {
func (sys *System) createReplaceableDataloader(kind nostr.Kind) *dataloader.Loader[nostr.PubKey, nostr.Event] {
return dataloader.NewBatchedLoader(
func(ctxs []context.Context, pubkeys []nostr.PubKey) map[nostr.PubKey]dataloader.Result[nostr.Event] {
return sys.batchLoadReplaceableEvents(ctxs, kind, pubkeys)
@@ -62,7 +62,7 @@ func (sys *System) createReplaceableDataloader(kind uint16) *dataloader.Loader[n
func (sys *System) batchLoadReplaceableEvents(
ctxs []context.Context,
kind uint16,
kind nostr.Kind,
pubkeys []nostr.PubKey,
) map[nostr.PubKey]dataloader.Result[nostr.Event] {
batchSize := len(pubkeys)
@@ -98,7 +98,7 @@ func (sys *System) batchLoadReplaceableEvents(
dfilter = nostr.DirectedFilter{
Relay: relay,
Filter: nostr.Filter{
Kinds: []uint16{kind},
Kinds: []nostr.Kind{kind},
Authors: make([]nostr.PubKey, 0, batchSize-i /* this and all pubkeys after this can be added */),
},
}
@@ -141,7 +141,7 @@ func (sys *System) batchLoadReplaceableEvents(
}
}
func (sys *System) determineRelaysToQuery(ctx context.Context, pubkey nostr.PubKey, kind uint16) []string {
func (sys *System) determineRelaysToQuery(ctx context.Context, pubkey nostr.PubKey, kind nostr.Kind) []string {
var relays []string
// search in specific relays for user

View File

@@ -27,7 +27,7 @@ func TestMetadataAndEvents(t *testing.T) {
// fetch notes
filter := nostr.Filter{
Kinds: []uint16{1},
Kinds: []nostr.Kind{1},
Authors: []nostr.PubKey{meta.PubKey},
Limit: 5,
}

View File

@@ -22,13 +22,13 @@ func fetchGenericSets[V comparable, I TagItemWithValue[V]](
sys *System,
ctx context.Context,
pubkey nostr.PubKey,
actualKind uint16,
actualKind nostr.Kind,
addressableIndex addressableIndex,
parseTag func(nostr.Tag) (I, bool),
cache cache.Cache32[GenericSets[V, I]],
) (fl GenericSets[V, I], fromInternal bool) {
n := pubkey[7]
lockIdx := (uint16(n) + actualKind) % 60
lockIdx := (nostr.Kind(n) + actualKind) % 60
genericListMutexes[lockIdx].Lock()
if valueWasJustCached[lockIdx] {
@@ -47,7 +47,7 @@ func fetchGenericSets[V comparable, I TagItemWithValue[V]](
v := GenericSets[V, I]{PubKey: pubkey}
events := slices.Collect(
sys.Store.QueryEvents(nostr.Filter{Kinds: []uint16{actualKind}, Authors: []nostr.PubKey{pubkey}}),
sys.Store.QueryEvents(nostr.Filter{Kinds: []nostr.Kind{actualKind}, Authors: []nostr.PubKey{pubkey}}),
)
if len(events) != 0 {
// ok, we found something locally

View File

@@ -84,7 +84,7 @@ func (sys *System) FetchSpecificEvent(
author = v.PublicKey
filter.Authors = []nostr.PubKey{v.PublicKey}
filter.Tags = nostr.TagMap{"d": []string{v.Identifier}}
filter.Kinds = []uint16{v.Kind}
filter.Kinds = []nostr.Kind{v.Kind}
relays = append(relays, v.Relays...)
relays = appendUnique(relays, sys.FallbackRelays.Next())
fallback = append(fallback, sys.FallbackRelays.Next(), sys.FallbackRelays.Next())

View File

@@ -8,7 +8,7 @@ import (
"fiatjaf.com/nostr/sdk/hints"
)
func (sys *System) TrackQueryAttempts(relay string, author nostr.PubKey, kind uint16) {
func (sys *System) TrackQueryAttempts(relay string, author nostr.PubKey, kind nostr.Kind) {
if IsVirtualRelay(relay) {
return
}

View File

@@ -17,7 +17,7 @@ func TestSubscribeBasic(t *testing.T) {
rl := mustRelayConnect(t, RELAY)
defer rl.Close()
sub, err := rl.Subscribe(context.Background(), Filter{Kinds: []uint16{KindTextNote}, Limit: 2}, SubscriptionOptions{})
sub, err := rl.Subscribe(context.Background(), Filter{Kinds: []Kind{KindTextNote}, Limit: 2}, SubscriptionOptions{})
assert.NoError(t, err)
timeout := time.After(5 * time.Second)
@@ -51,14 +51,14 @@ func TestNestedSubscriptions(t *testing.T) {
n := atomic.Uint32{}
// fetch 2 replies to a note
sub, err := rl.Subscribe(context.Background(), Filter{Kinds: []uint16{KindTextNote}, Tags: TagMap{"e": []string{"0e34a74f8547e3b95d52a2543719b109fd0312aba144e2ef95cba043f42fe8c5"}}, Limit: 3}, SubscriptionOptions{})
sub, err := rl.Subscribe(context.Background(), Filter{Kinds: []Kind{KindTextNote}, Tags: TagMap{"e": []string{"0e34a74f8547e3b95d52a2543719b109fd0312aba144e2ef95cba043f42fe8c5"}}, Limit: 3}, SubscriptionOptions{})
assert.NoError(t, err)
for {
select {
case event := <-sub.Events:
// now fetch author of this
sub, err := rl.Subscribe(context.Background(), Filter{Kinds: []uint16{KindProfileMetadata}, Authors: []PubKey{event.PubKey}, Limit: 1}, SubscriptionOptions{})
sub, err := rl.Subscribe(context.Background(), Filter{Kinds: []Kind{KindProfileMetadata}, Authors: []PubKey{event.PubKey}, Limit: 1}, SubscriptionOptions{})
assert.NoError(t, err)
for {