add support for limiting files using shell like patterns
drakkan committed Nov 15, 2020
1 parent c0f47a5 commit a6355e2
Showing 19 changed files with 702 additions and 261 deletions.
2 changes: 1 addition & 1 deletion README.md
@@ -28,7 +28,7 @@ It can serve local filesystem, S3 (compatible) Object Storage, Google Cloud Stor
- Per user and per directory permission management: list directory contents, upload, overwrite, download, delete, rename, create directories, create symlinks, change owner/group and mode, change access and modification times.
- Per user files/folders ownership mapping: you can map all the users to the system account that runs SFTPGo (all platforms are supported) or you can run SFTPGo as root user and map each user or group of users to a different system account (\*NIX only).
- Per user IP filters are supported: login can be restricted to specific ranges of IP addresses or to a specific IP address.
- Per user and per directory file extensions filters are supported: files can be allowed or denied based on their extensions.
- Per user and per directory shell-like pattern filters are supported: files can be allowed or denied based on shell-like patterns.
- Virtual folders are supported: directories outside the user home directory can be exposed as virtual folders.
- Configurable custom commands and/or HTTP notifications on file upload, download, pre-delete, delete, rename, on SSH commands and on user add, update and delete.
- Automatically terminating idle connections.
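The new filters rely on Go's `path.Match` semantics: patterns are compared against the lowercased base name of the file, and denied patterns win over allowed ones. A minimal standalone sketch of that behavior (not SFTPGo's actual implementation, which appears further down in this diff):

```go
package main

import (
	"fmt"
	"path"
	"strings"
)

// isAllowed mirrors the evaluation order introduced by this commit:
// denied patterns are checked first, then allowed patterns; an empty
// allowed list means "allow everything not explicitly denied".
func isAllowed(name string, denied, allowed []string) bool {
	toMatch := strings.ToLower(path.Base(name))
	for _, p := range denied {
		if matched, err := path.Match(p, toMatch); err == nil && matched {
			return false
		}
	}
	if len(allowed) == 0 {
		return true
	}
	for _, p := range allowed {
		if matched, err := path.Match(p, toMatch); err == nil && matched {
			return true
		}
	}
	return false
}

func main() {
	fmt.Println(isAllowed("/photos/IMG_0001.JPG", nil, []string{"*.jpg"}))  // true
	fmt.Println(isAllowed("/photos/notes.txt", nil, []string{"*.jpg"}))     // false
	fmt.Println(isAllowed("/photos/backup.jpg", []string{"backup.*"}, nil)) // false
}
```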
64 changes: 32 additions & 32 deletions cmd/portable.go
@@ -32,8 +32,8 @@ var (
portablePublicKeys []string
portablePermissions []string
portableSSHCommands []string
portableAllowedExtensions []string
portableDeniedExtensions []string
portableAllowedPatterns []string
portableDeniedPatterns []string
portableFsProvider int
portableS3Bucket string
portableS3Region string
@@ -174,7 +174,7 @@ Please take a look at the usage below to customize the serving parameters`,
},
},
Filters: dataprovider.UserFilters{
FileExtensions: parseFileExtensionsFilters(),
FilePatterns: parsePatternsFilesFilters(),
},
},
}
@@ -217,16 +217,16 @@ value`)
portableCmd.Flags().StringSliceVarP(&portablePermissions, "permissions", "g", []string{"list", "download"},
`User's permissions. "*" means any
permission`)
portableCmd.Flags().StringArrayVar(&portableAllowedExtensions, "allowed-extensions", []string{},
`Allowed file extensions case
insensitive. The format is
/dir::ext1,ext2.
For example: "/somedir::.jpg,.png"`)
portableCmd.Flags().StringArrayVar(&portableDeniedExtensions, "denied-extensions", []string{},
`Denied file extensions case
insensitive. The format is
/dir::ext1,ext2.
For example: "/somedir::.jpg,.png"`)
portableCmd.Flags().StringArrayVar(&portableAllowedPatterns, "allowed-patterns", []string{},
`Allowed file patterns case insensitive.
The format is:
/dir::pattern1,pattern2.
For example: "/somedir::*.jpg,a*b?.png"`)
portableCmd.Flags().StringArrayVar(&portableDeniedPatterns, "denied-patterns", []string{},
`Denied file patterns case insensitive.
The format is:
/dir::pattern1,pattern2.
For example: "/somedir::*.jpg,a*b?.png"`)
portableCmd.Flags().BoolVarP(&portableAdvertiseService, "advertise-service", "S", false,
`Advertise SFTP/FTP service using
multicast DNS`)
@@ -287,42 +287,42 @@ parallel`)
rootCmd.AddCommand(portableCmd)
}

func parseFileExtensionsFilters() []dataprovider.ExtensionsFilter {
var extensions []dataprovider.ExtensionsFilter
for _, val := range portableAllowedExtensions {
p, exts := getExtensionsFilterValues(strings.TrimSpace(val))
func parsePatternsFilesFilters() []dataprovider.PatternsFilter {
var patterns []dataprovider.PatternsFilter
for _, val := range portableAllowedPatterns {
p, exts := getPatternsFilterValues(strings.TrimSpace(val))
if len(p) > 0 {
extensions = append(extensions, dataprovider.ExtensionsFilter{
Path: path.Clean(p),
AllowedExtensions: exts,
DeniedExtensions: []string{},
patterns = append(patterns, dataprovider.PatternsFilter{
Path: path.Clean(p),
AllowedPatterns: exts,
DeniedPatterns: []string{},
})
}
}
for _, val := range portableDeniedExtensions {
p, exts := getExtensionsFilterValues(strings.TrimSpace(val))
for _, val := range portableDeniedPatterns {
p, exts := getPatternsFilterValues(strings.TrimSpace(val))
if len(p) > 0 {
found := false
for index, e := range extensions {
for index, e := range patterns {
if path.Clean(e.Path) == path.Clean(p) {
extensions[index].DeniedExtensions = append(extensions[index].DeniedExtensions, exts...)
patterns[index].DeniedPatterns = append(patterns[index].DeniedPatterns, exts...)
found = true
break
}
}
if !found {
extensions = append(extensions, dataprovider.ExtensionsFilter{
Path: path.Clean(p),
AllowedExtensions: []string{},
DeniedExtensions: exts,
patterns = append(patterns, dataprovider.PatternsFilter{
Path: path.Clean(p),
AllowedPatterns: []string{},
DeniedPatterns: exts,
})
}
}
}
return extensions
return patterns
}

func getExtensionsFilterValues(value string) (string, []string) {
func getPatternsFilterValues(value string) (string, []string) {
if strings.Contains(value, "::") {
dirExts := strings.Split(value, "::")
if len(dirExts) > 1 {
@@ -334,7 +334,7 @@ func getExtensionsFilterValues(value string) (string, []string) {
exts = append(exts, cleanedExt)
}
}
if len(dir) > 0 && len(exts) > 0 {
if dir != "" && len(exts) > 0 {
return dir, exts
}
}
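For illustration, a flag value such as `/somedir::*.jpg,a*b?.png` is split on `::` into a directory and a comma-separated pattern list. A simplified, self-contained sketch of that parsing (the real logic is `getPatternsFilterValues`/`parsePatternsFilesFilters` above):

```go
package main

import (
	"fmt"
	"strings"
)

// splitPatternsFlag mirrors the /dir::pattern1,pattern2 format accepted by
// --allowed-patterns and --denied-patterns (a simplified sketch, not the
// real getPatternsFilterValues).
func splitPatternsFlag(value string) (dir string, patterns []string) {
	parts := strings.SplitN(value, "::", 2)
	if len(parts) != 2 {
		return "", nil
	}
	dir = strings.TrimSpace(parts[0])
	for _, p := range strings.Split(parts[1], ",") {
		if p = strings.TrimSpace(p); p != "" {
			patterns = append(patterns, p)
		}
	}
	return dir, patterns
}

func main() {
	dir, patterns := splitPatternsFlag("/somedir::*.jpg,a*b?.png")
	fmt.Println(dir, patterns) // /somedir [*.jpg a*b?.png]
}
```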
63 changes: 62 additions & 1 deletion dataprovider/dataprovider.go
@@ -903,6 +903,50 @@ func validatePublicKeys(user *User) error {
return nil
}

func validateFiltersPatternExtensions(user *User) error {
if len(user.Filters.FilePatterns) == 0 {
user.Filters.FilePatterns = []PatternsFilter{}
return nil
}
filteredPaths := []string{}
var filters []PatternsFilter
for _, f := range user.Filters.FilePatterns {
cleanedPath := filepath.ToSlash(path.Clean(f.Path))
if !path.IsAbs(cleanedPath) {
return &ValidationError{err: fmt.Sprintf("invalid path %#v for file patterns filter", f.Path)}
}
if utils.IsStringInSlice(cleanedPath, filteredPaths) {
return &ValidationError{err: fmt.Sprintf("duplicate file patterns filter for path %#v", f.Path)}
}
if len(f.AllowedPatterns) == 0 && len(f.DeniedPatterns) == 0 {
return &ValidationError{err: fmt.Sprintf("empty file patterns filter for path %#v", f.Path)}
}
f.Path = cleanedPath
allowed := make([]string, 0, len(f.AllowedPatterns))
denied := make([]string, 0, len(f.DeniedPatterns))
for _, pattern := range f.AllowedPatterns {
_, err := path.Match(pattern, "abc")
if err != nil {
return &ValidationError{err: fmt.Sprintf("invalid file pattern filter %v", pattern)}
}
allowed = append(allowed, strings.ToLower(pattern))
}
for _, pattern := range f.DeniedPatterns {
_, err := path.Match(pattern, "abc")
if err != nil {
return &ValidationError{err: fmt.Sprintf("invalid file pattern filter %v", pattern)}
}
denied = append(denied, strings.ToLower(pattern))
}
f.AllowedPatterns = allowed
f.DeniedPatterns = denied
filters = append(filters, f)
filteredPaths = append(filteredPaths, cleanedPath)
}
user.Filters.FilePatterns = filters
return nil
}
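The validator above relies on `path.Match` returning an error for malformed patterns at save time, and it stores patterns lowercased so later matching is case insensitive. A quick standalone illustration, not part of the commit:

```go
package main

import (
	"fmt"
	"path"
	"strings"
)

func main() {
	// A malformed pattern is rejected when the user is saved, not at match time.
	if _, err := path.Match("a[", "abc"); err != nil {
		fmt.Println("rejected:", err) // rejected: syntax error in pattern
	}

	// Valid patterns are stored lowercased so matching is case insensitive.
	pattern := strings.ToLower("*.JPG")
	matched, _ := path.Match(pattern, strings.ToLower("Photo.jpg"))
	fmt.Println(matched) // true
}
```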

func validateFiltersFileExtensions(user *User) error {
if len(user.Filters.FileExtensions) == 0 {
user.Filters.FileExtensions = []ExtensionsFilter{}
@@ -922,13 +966,30 @@ func validateFiltersFileExtensions(user *User) error {
return &ValidationError{err: fmt.Sprintf("empty file extensions filter for path %#v", f.Path)}
}
f.Path = cleanedPath
allowed := make([]string, 0, len(f.AllowedExtensions))
denied := make([]string, 0, len(f.DeniedExtensions))
for _, ext := range f.AllowedExtensions {
allowed = append(allowed, strings.ToLower(ext))
}
for _, ext := range f.DeniedExtensions {
denied = append(denied, strings.ToLower(ext))
}
f.AllowedExtensions = allowed
f.DeniedExtensions = denied
filters = append(filters, f)
filteredPaths = append(filteredPaths, cleanedPath)
}
user.Filters.FileExtensions = filters
return nil
}

func validateFileFilters(user *User) error {
if err := validateFiltersFileExtensions(user); err != nil {
return err
}
return validateFiltersPatternExtensions(user)
}

func validateFilters(user *User) error {
if len(user.Filters.AllowedIP) == 0 {
user.Filters.AllowedIP = []string{}
Expand Down Expand Up @@ -970,7 +1031,7 @@ func validateFilters(user *User) error {
return &ValidationError{err: fmt.Sprintf("invalid protocol: %#v", p)}
}
}
return validateFiltersFileExtensions(user)
return validateFileFilters(user)
}

func saveGCSCredentials(user *User) error {
81 changes: 73 additions & 8 deletions dataprovider/user.go
@@ -81,25 +81,46 @@ func (c *CachedUser) IsExpired() bool {
// ExtensionsFilter defines filters based on file extensions.
// These restrictions do not apply to files listing for performance reasons, so
// a denied file cannot be downloaded/overwritten/renamed but will still be
// it will still be listed in the list of files.
// in the list of files.
// System commands such as Git and rsync interact with the filesystem directly
// and they are not aware of these restrictions, so they are not allowed
// inside paths with extension filters
type ExtensionsFilter struct {
// SFTP/SCP path, if no other specific filter is defined, the filter apply for
// Virtual path, if no other specific filter is defined, the filter applies to
// sub directories too.
// For example if filters are defined for the paths "/" and "/sub" then the
// filters for "/" are applied for any file outside the "/sub" directory
Path string `json:"path"`
// only files with these, case insensitive, extensions are allowed.
// Shell like expansion is not supported so you have to specify ".jpg" and
// not "*.jpg"
// not "*.jpg". If you want shell like patterns use pattern filters
AllowedExtensions []string `json:"allowed_extensions,omitempty"`
// files with these, case insensitive, extensions are not allowed.
// Denied file extensions are evaluated before the allowed ones
DeniedExtensions []string `json:"denied_extensions,omitempty"`
}

// PatternsFilter defines filters based on shell like patterns.
// These restrictions do not apply to files listing for performance reasons, so
// a denied file cannot be downloaded/overwritten/renamed but will still be
// in the list of files.
// System commands such as Git and rsync interact with the filesystem directly
// and they are not aware of these restrictions, so they are not allowed
// inside paths with pattern filters
type PatternsFilter struct {
// Virtual path, if no other specific filter is defined, the filter applies to
// sub directories too.
// For example if filters are defined for the paths "/" and "/sub" then the
// filters for "/" are applied for any file outside the "/sub" directory
Path string `json:"path"`
// files with these, case insensitive, patterns are allowed.
// Denied file patterns are evaluated before the allowed ones
AllowedPatterns []string `json:"allowed_patterns,omitempty"`
// files with these, case insensitive, patterns are not allowed.
// Denied file patterns are evaluated before the allowed ones
DeniedPatterns []string `json:"denied_patterns,omitempty"`
}

// UserFilters defines additional restrictions for a user
type UserFilters struct {
// only clients connecting from these IP/Mask are allowed.
Expand All @@ -118,6 +139,8 @@ type UserFilters struct {
// filters based on file extensions.
// Please note that these restrictions can be easily bypassed.
FileExtensions []ExtensionsFilter `json:"file_extensions,omitempty"`
// filters based on shell like patterns.
FilePatterns []PatternsFilter `json:"file_patterns,omitempty"`
// max size allowed for a single upload, 0 means unlimited
MaxUploadFileSize int64 `json:"max_upload_file_size,omitempty"`
}
@@ -444,11 +467,15 @@ func (u *User) GetAllowedLoginMethods() []string {
}

// IsFileAllowed returns true if the specified file is allowed by the file restrictions filters
func (u *User) IsFileAllowed(sftpPath string) bool {
func (u *User) IsFileAllowed(virtualPath string) bool {
return u.isFilePatternAllowed(virtualPath) && u.isFileExtensionAllowed(virtualPath)
}

func (u *User) isFileExtensionAllowed(virtualPath string) bool {
if len(u.Filters.FileExtensions) == 0 {
return true
}
dirsForPath := utils.GetDirsForSFTPPath(path.Dir(sftpPath))
dirsForPath := utils.GetDirsForSFTPPath(path.Dir(virtualPath))
var filter ExtensionsFilter
for _, dir := range dirsForPath {
for _, f := range u.Filters.FileExtensions {
@@ -457,12 +484,12 @@ func (u *User) IsFileAllowed(sftpPath string) bool {
break
}
}
if len(filter.Path) > 0 {
if filter.Path != "" {
break
}
}
if len(filter.Path) > 0 {
toMatch := strings.ToLower(sftpPath)
if filter.Path != "" {
toMatch := strings.ToLower(virtualPath)
for _, denied := range filter.DeniedExtensions {
if strings.HasSuffix(toMatch, denied) {
return false
@@ -478,6 +505,42 @@ func (u *User) IsFileAllowed(sftpPath string) bool {
return true
}

func (u *User) isFilePatternAllowed(virtualPath string) bool {
if len(u.Filters.FilePatterns) == 0 {
return true
}
dirsForPath := utils.GetDirsForSFTPPath(path.Dir(virtualPath))
var filter PatternsFilter
for _, dir := range dirsForPath {
for _, f := range u.Filters.FilePatterns {
if f.Path == dir {
filter = f
break
}
}
if filter.Path != "" {
break
}
}
if filter.Path != "" {
toMatch := strings.ToLower(path.Base(virtualPath))
for _, denied := range filter.DeniedPatterns {
matched, err := path.Match(denied, toMatch)
if err != nil || matched {
return false
}
}
for _, allowed := range filter.AllowedPatterns {
matched, err := path.Match(allowed, toMatch)
if err == nil && matched {
return true
}
}
return len(filter.AllowedPatterns) == 0
}
return true
}
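A note on precedence: the loop over `GetDirsForSFTPPath` consults only the filter defined for the closest parent directory, so a filter on `/sub` overrides one on `/` for files under `/sub`. A rough sketch of that selection, using a simplified parent-walk helper in place of `utils.GetDirsForSFTPPath`:

```go
package main

import (
	"fmt"
	"path"
)

// parentDirs returns dir and all of its ancestors up to "/", closest first,
// e.g. "/sub/dir" -> ["/sub/dir", "/sub", "/"]. A simplified stand-in for
// utils.GetDirsForSFTPPath.
func parentDirs(dir string) []string {
	dirs := []string{path.Clean(dir)}
	for dirs[len(dirs)-1] != "/" {
		dirs = append(dirs, path.Dir(dirs[len(dirs)-1]))
	}
	return dirs
}

func main() {
	filters := map[string][]string{ // virtual path -> denied patterns
		"/":    {"*.zip"},
		"/sub": {"*.txt"},
	}
	for _, file := range []string{"/a.zip", "/sub/a.zip", "/sub/a.txt"} {
		for _, dir := range parentDirs(path.Dir(file)) {
			if denied, ok := filters[dir]; ok {
				// Only the closest filter is consulted: /sub/a.zip is allowed
				// because the /sub filter does not deny *.zip.
				matched, _ := path.Match(denied[0], path.Base(file))
				fmt.Println(file, "denied:", matched)
				break
			}
		}
	}
}
```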

// IsLoginFromAddrAllowed returns true if the login is allowed from the specified remoteAddr.
// If AllowedIP is defined only the specified IP/Mask can login.
// If DeniedIP is defined the specified IP/Mask cannot login.
@@ -711,6 +774,8 @@ func (u *User) getACopy() User {
copy(filters.DeniedLoginMethods, u.Filters.DeniedLoginMethods)
filters.FileExtensions = make([]ExtensionsFilter, len(u.Filters.FileExtensions))
copy(filters.FileExtensions, u.Filters.FileExtensions)
filters.FilePatterns = make([]PatternsFilter, len(u.Filters.FilePatterns))
copy(filters.FilePatterns, u.Filters.FilePatterns)
filters.DeniedProtocols = make([]string, len(u.Filters.DeniedProtocols))
copy(filters.DeniedProtocols, u.Filters.DeniedProtocols)
fsConfig := Filesystem{
12 changes: 8 additions & 4 deletions docs/account.md
@@ -41,10 +41,14 @@ For each account, the following properties can be configured:
- `SSH`
- `FTP`
- `DAV`
- `file_extensions`, list of struct. These restrictions do not apply to files listing for performance reasons, so a denied file cannot be downloaded/overwritten/renamed but it will still be listed in the list of files. Please note that these restrictions can be easily bypassed. Each struct contains the following fields:
- `allowed_extensions`, list of, case insensitive, allowed files extension. Shell like expansion is not supported so you have to specify `.jpg` and not `*.jpg`. Any file that does not end with this suffix will be denied
- `denied_extensions`, list of, case insensitive, denied files extension. Denied file extensions are evaluated before the allowed ones
- `path`, SFTP/SCP path, if no other specific filter is defined, the filter apply for sub directories too. For example if filters are defined for the paths `/` and `/sub` then the filters for `/` are applied for any file outside the `/sub` directory
- `file_extensions`, list of struct. Deprecated, please use `file_patterns`. These restrictions do not apply to files listing for performance reasons, so a denied file cannot be downloaded/overwritten/renamed but it will still be in the list of files. Please note that these restrictions can be easily bypassed. Each struct contains the following fields:
- `allowed_extensions`, list of, case insensitive, allowed file extensions. Shell like expansion is not supported so you have to specify `.jpg` and not `*.jpg`. Any file that does not end with this suffix will be denied
- `denied_extensions`, list of, case insensitive, denied file extensions. Denied file extensions are evaluated before the allowed ones
- `path`, exposed virtual path, if no other specific filter is defined, the filter applies to sub directories too. For example if filters are defined for the paths `/` and `/sub` then the filters for `/` are applied for any file outside the `/sub` directory
- `file_patterns`, list of struct. These restrictions do not apply to files listing for performance reasons, so a denied file cannot be downloaded/overwritten/renamed but it will still be in the list of files. Please note that these restrictions can be easily bypassed. For syntax details take a look [here](https://golang.org/pkg/path/#Match). Each struct contains the following fields:
- `allowed_patterns`, list of, case insensitive, allowed file patterns. Examples: `*.jpg`, `a*b?.png`. Any non-matching file will be denied
- `denied_patterns`, list of, case insensitive, denied file patterns. Denied file patterns are evaluated before the allowed ones
- `path`, exposed virtual path, if no other specific filter is defined, the filter applies to sub directories too. For example if filters are defined for the paths `/` and `/sub` then the filters for `/` are applied for any file outside the `/sub` directory
- `fs_provider`, filesystem to serve via SFTP. Local filesystem (0), S3 Compatible Object Storage (1), Google Cloud Storage (2) and Azure Blob Storage (3) are supported
- `s3_bucket`, required for S3 filesystem
- `s3_region`, required for S3 filesystem. Must match the region for your bucket. You can find here the list of available [AWS regions](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/using-regions-availability-zones.html#concepts-available-regions). For example if your bucket is at `Frankfurt` you have to set the region to `eu-central-1`
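As a hand-written example (not taken from the repository), the `file_patterns` restriction described above maps to the `PatternsFilter` struct added in `dataprovider/user.go`; the import path below is assumed from the module name in use at the time of this commit:

```go
package main

import (
	"fmt"

	// Import path assumed from the module name used at the time of this commit.
	"github.com/drakkan/sftpgo/dataprovider"
)

func main() {
	// A hypothetical filters section: only images are allowed under /photos,
	// archives are denied under /uploads. The JSON keys documented in
	// account.md map to these fields via the struct tags in dataprovider/user.go.
	filters := dataprovider.UserFilters{
		FilePatterns: []dataprovider.PatternsFilter{
			{Path: "/photos", AllowedPatterns: []string{"*.jpg", "*.png"}},
			{Path: "/uploads", DeniedPatterns: []string{"*.zip"}},
		},
	}
	fmt.Printf("%+v\n", filters)
}
```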