gitea/vendor/github.com/src-d/enry/v2/internal/tokenizer/common.go


// Package tokenizer implements file tokenization used by the enry content
// classifier. This package is an implementation detail of enry and should not
// be imported by other packages.
package tokenizer

// ByteLimit defines the maximum prefix of an input text that will be tokenized.
const ByteLimit = 100000
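
For context, a minimal sketch of how a byte limit like this is typically applied: the input is truncated to its first ByteLimit bytes before being split into tokens. The tokenize helper and the whitespace-based splitting below are illustrative assumptions for this sketch, not this package's actual tokenization logic (which is not shown here).

package main

import (
	"bytes"
	"fmt"
)

const ByteLimit = 100000

// tokenize is a hypothetical helper: it caps the input at ByteLimit bytes
// and splits the remaining prefix on whitespace. The real enry tokenizer
// applies more involved rules, but the truncation step is the same idea.
func tokenize(content []byte) [][]byte {
	if len(content) > ByteLimit {
		content = content[:ByteLimit]
	}
	return bytes.Fields(content)
}

func main() {
	tokens := tokenize([]byte("package tokenizer // example input"))
	fmt.Println(len(tokens), string(tokens[0]))
}

Capping tokenization at a fixed prefix keeps classification cost bounded for very large files; the first 100 KB is generally enough signal for language detection.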