Mirror of https://github.com/superseriousbusiness/gotosocial.git (synced 2024-11-27 10:51:00 +00:00)
[feature] Media attachment placeholders (#2331)

* [feature] Use placeholders for unknown media types
* fix read of underreported small files
* switch to reduce nesting
* simplify cleanup

parent c7ecab9e6f
commit ba9d6b467a

41 changed files with 1472 additions and 841 deletions
@@ -99,8 +99,8 @@ func (suite *GetTestSuite) TestGet() {
  "header_static": "http://localhost:8080/assets/default_header.png",
  "followers_count": 0,
  "following_count": 0,
- "statuses_count": 0,
- "last_status_at": null,
+ "statuses_count": 1,
+ "last_status_at": "2023-11-02T10:44:25.000Z",
  "emojis": [],
  "fields": []
 }
@@ -216,8 +216,8 @@ func (suite *MediaCreateTestSuite) TestMediaCreateSuccessful() {
  X: -0.5,
  Y: 0.5,
  },
- }, attachmentReply.Meta)
- suite.Equal("LiBzRk#6V[WF_NvzV@WY_3rqV@a$", attachmentReply.Blurhash)
+ }, *attachmentReply.Meta)
+ suite.Equal("LiBzRk#6V[WF_NvzV@WY_3rqV@a$", *attachmentReply.Blurhash)
  suite.NotEmpty(attachmentReply.ID)
  suite.NotEmpty(attachmentReply.URL)
  suite.NotEmpty(attachmentReply.PreviewURL)
@@ -301,8 +301,8 @@ func (suite *MediaCreateTestSuite) TestMediaCreateSuccessfulV2() {
  X: -0.5,
  Y: 0.5,
  },
- }, attachmentReply.Meta)
- suite.Equal("LiBzRk#6V[WF_NvzV@WY_3rqV@a$", attachmentReply.Blurhash)
+ }, *attachmentReply.Meta)
+ suite.Equal("LiBzRk#6V[WF_NvzV@WY_3rqV@a$", *attachmentReply.Blurhash)
  suite.NotEmpty(attachmentReply.ID)
  suite.Nil(attachmentReply.URL)
  suite.NotEmpty(attachmentReply.PreviewURL)
@@ -186,8 +186,8 @@ func (suite *MediaUpdateTestSuite) TestUpdateImage() {
  Original: apimodel.MediaDimensions{Width: 800, Height: 450, FrameRate: "", Duration: 0, Bitrate: 0, Size: "800x450", Aspect: 1.7777778},
  Small: apimodel.MediaDimensions{Width: 256, Height: 144, FrameRate: "", Duration: 0, Bitrate: 0, Size: "256x144", Aspect: 1.7777778},
  Focus: &apimodel.MediaFocus{X: -0.1, Y: 0.3},
- }, attachmentReply.Meta)
- suite.Equal(toUpdate.Blurhash, attachmentReply.Blurhash)
+ }, *attachmentReply.Meta)
+ suite.Equal(toUpdate.Blurhash, *attachmentReply.Blurhash)
  suite.Equal(toUpdate.ID, attachmentReply.ID)
  suite.Equal(toUpdate.URL, *attachmentReply.URL)
  suite.NotEmpty(toUpdate.Thumbnail.URL, attachmentReply.PreviewURL)
@@ -70,10 +70,10 @@ type Attachment struct {
  URL *string `json:"url"`
  // A shorter URL for the attachment.
  // In our case, we just give the URL again since we don't create smaller URLs.
- TextURL string `json:"text_url"`
+ TextURL *string `json:"text_url"`
  // The location of a scaled-down preview of the attachment.
  // example: https://example.org/fileserver/some_id/attachments/some_id/small/attachment.jpeg
- PreviewURL string `json:"preview_url"`
+ PreviewURL *string `json:"preview_url"`
  // The location of the full-size original attachment on the remote server.
  // Only defined for instances other than our own.
  // example: https://some-other-server.org/attachments/original/ahhhhh.jpeg
@@ -83,13 +83,13 @@ type Attachment struct {
  // example: https://some-other-server.org/attachments/small/ahhhhh.jpeg
  PreviewRemoteURL *string `json:"preview_remote_url"`
  // Metadata for this attachment.
- Meta MediaMeta `json:"meta,omitempty"`
+ Meta *MediaMeta `json:"meta"`
  // Alt text that describes what is in the media attachment.
  // example: This is a picture of a kitten.
  Description *string `json:"description"`
  // A hash computed by the BlurHash algorithm, for generating colorful preview thumbnails when media has not been downloaded yet.
  // See https://github.com/woltapp/blurhash
- Blurhash string `json:"blurhash,omitempty"`
+ Blurhash *string `json:"blurhash"`
 }

 // MediaMeta models media metadata.
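Because these API fields are now pointers, a placeholder attachment for an unprocessable media type serialises with explicit null values rather than empty strings. A minimal sketch of that marshalling behaviour, using a trimmed-down struct rather than the real apimodel.Attachment:

package main

import (
	"encoding/json"
	"fmt"
)

type attachment struct {
	URL        *string `json:"url"`
	PreviewURL *string `json:"preview_url"`
	Blurhash   *string `json:"blurhash"`
}

func main() {
	// Placeholder: nothing has been processed yet, so every field is nil.
	b, _ := json.Marshal(attachment{})
	fmt.Println(string(b)) // {"url":null,"preview_url":null,"blurhash":null}

	// Fully processed media carries real values instead.
	url := "https://example.org/fileserver/some_id/attachments/some_id/small/attachment.jpeg"
	b, _ = json.Marshal(attachment{URL: &url})
	fmt.Println(string(b))
}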
@@ -290,7 +290,7 @@ func (m *Media) isOrphaned(ctx context.Context, path string) (bool, error) {

  case media.TypeEmoji:
  // Generate static URL for this emoji to lookup.
- staticURL := uris.GenerateURIForAttachment(
+ staticURL := uris.URIForAttachment(
  pathParts[1], // instance account ID
  string(media.TypeEmoji),
  string(media.SizeStatic),
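URIForAttachment takes the same arguments as the old GenerateURIForAttachment: account ID, media type, media size, attachment ID, and file extension. A hypothetical helper with the same shape, assuming the usual fileserver URL layout; the real implementation lives in internal/uris and may differ:

// uriForAttachment is illustrative only; the name, signature, and URL layout are assumptions.
func uriForAttachment(host, accountID, mediaType, mediaSize, attachmentID, ext string) string {
	return fmt.Sprintf("https://%s/fileserver/%s/%s/%s/%s.%s",
		host, accountID, mediaType, mediaSize, attachmentID, ext)
}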
@@ -304,7 +304,7 @@ func (suite *MediaTestSuite) TestUncacheRemote() {
  after := time.Now().Add(-24 * time.Hour)
  totalUncached, err := suite.cleaner.Media().UncacheRemote(ctx, after)
  suite.NoError(err)
- suite.Equal(2, totalUncached)
+ suite.Equal(3, totalUncached)

  uncachedAttachment, err := suite.db.GetAttachmentByID(ctx, testStatusAttachment.ID)
  suite.NoError(err)

@@ -327,7 +327,7 @@ func (suite *MediaTestSuite) TestUncacheRemoteDry() {
  after := time.Now().Add(-24 * time.Hour)
  totalUncached, err := suite.cleaner.Media().UncacheRemote(gtscontext.SetDryRun(ctx), after)
  suite.NoError(err)
- suite.Equal(2, totalUncached)
+ suite.Equal(3, totalUncached)

  uncachedAttachment, err := suite.db.GetAttachmentByID(ctx, testStatusAttachment.ID)
  suite.NoError(err)

@@ -344,7 +344,7 @@ func (suite *MediaTestSuite) TestUncacheRemoteTwice() {

  totalUncached, err := suite.cleaner.Media().UncacheRemote(ctx, after)
  suite.NoError(err)
- suite.Equal(2, totalUncached)
+ suite.Equal(3, totalUncached)

  // final uncache should uncache nothing, since the first uncache already happened
  totalUncachedAgain, err := suite.cleaner.Media().UncacheRemote(ctx, after)

@@ -360,7 +360,7 @@ func (suite *MediaTestSuite) TestUncacheAndRecache() {
  after := time.Now().Add(-24 * time.Hour)
  totalUncached, err := suite.cleaner.Media().UncacheRemote(ctx, after)
  suite.NoError(err)
- suite.Equal(2, totalUncached)
+ suite.Equal(3, totalUncached)

  // media should no longer be stored
  _, err = suite.storage.Get(ctx, testStatusAttachment.File.Path)

@@ -424,5 +424,5 @@ func (suite *MediaTestSuite) TestUncacheOneNonExistent() {
  after := time.Now().Add(-24 * time.Hour)
  totalUncached, err := suite.cleaner.Media().UncacheRemote(ctx, after)
  suite.NoError(err)
- suite.Equal(2, totalUncached)
+ suite.Equal(3, totalUncached)
 }
@@ -121,7 +121,7 @@ func (suite *BasicTestSuite) TestGetAllStatuses() {
  s := []*gtsmodel.Status{}
  err := suite.db.GetAll(context.Background(), &s)
  suite.NoError(err)
- suite.Len(s, 20)
+ suite.Len(s, 21)
 }

 func (suite *BasicTestSuite) TestGetAllNotNull() {
@@ -39,7 +39,7 @@ func (suite *MediaTestSuite) TestGetAttachmentByID() {
 func (suite *MediaTestSuite) TestGetOlder() {
  attachments, err := suite.db.GetCachedAttachmentsOlderThan(context.Background(), time.Now(), 20)
  suite.NoError(err)
- suite.Len(attachments, 2)
+ suite.Len(attachments, 3)
 }

 func (suite *MediaTestSuite) TestGetCachedAttachmentsOlderThan() {

@@ -47,7 +47,7 @@ func (suite *MediaTestSuite) TestGetCachedAttachmentsOlderThan() {

  attachments, err := suite.db.GetCachedAttachmentsOlderThan(ctx, time.Now(), 20)
  suite.NoError(err)
- suite.Len(attachments, 2)
+ suite.Len(attachments, 3)
 }

 func TestMediaTestSuite(t *testing.T) {
@@ -316,6 +316,7 @@ func (s *statusDB) PutStatus(ctx context.Context, status *gtsmodel.Status) error
  if _, err := tx.
  NewUpdate().
  Model(a).
+ Column("status_id", "updated_at").
  Where("? = ?", bun.Ident("media_attachment.id"), a.ID).
  Exec(ctx); err != nil {
  if !errors.Is(err, db.ErrAlreadyExists) {
@@ -622,21 +622,16 @@ func (d *Dereferencer) fetchRemoteAccountAvatar(ctx context.Context, tsport tran
  processing, ok := d.derefAvatars[latestAcc.AvatarRemoteURL]

  if !ok {
- var err error
-
  // Set the media data function to dereference avatar from URI.
  data := func(ctx context.Context) (io.ReadCloser, int64, error) {
  return tsport.DereferenceMedia(ctx, avatarURI)
  }

  // Create new media processing request from the media manager instance.
- processing, err = d.mediaManager.PreProcessMedia(ctx, data, latestAcc.ID, &media.AdditionalMediaInfo{
+ processing = d.mediaManager.PreProcessMedia(data, latestAcc.ID, &media.AdditionalMediaInfo{
  Avatar: func() *bool { v := true; return &v }(),
  RemoteURL: &latestAcc.AvatarRemoteURL,
  })
- if err != nil {
- return gtserror.Newf("error preprocessing media for attachment %s: %w", latestAcc.AvatarRemoteURL, err)
- }

  // Store media in map to mark as processing.
  d.derefAvatars[latestAcc.AvatarRemoteURL] = processing
@@ -713,21 +708,16 @@ func (d *Dereferencer) fetchRemoteAccountHeader(ctx context.Context, tsport tran
  processing, ok := d.derefHeaders[latestAcc.HeaderRemoteURL]

  if !ok {
- var err error
-
  // Set the media data function to dereference avatar from URI.
  data := func(ctx context.Context) (io.ReadCloser, int64, error) {
  return tsport.DereferenceMedia(ctx, headerURI)
  }

  // Create new media processing request from the media manager instance.
- processing, err = d.mediaManager.PreProcessMedia(ctx, data, latestAcc.ID, &media.AdditionalMediaInfo{
+ processing = d.mediaManager.PreProcessMedia(data, latestAcc.ID, &media.AdditionalMediaInfo{
  Header: func() *bool { v := true; return &v }(),
  RemoteURL: &latestAcc.HeaderRemoteURL,
  })
- if err != nil {
- return gtserror.Newf("error preprocessing media for attachment %s: %w", latestAcc.HeaderRemoteURL, err)
- }

  // Store media in map to mark as processing.
  d.derefHeaders[latestAcc.HeaderRemoteURL] = processing
@@ -1,54 +0,0 @@ (file deleted)
// GoToSocial
// Copyright (C) GoToSocial Authors admin@gotosocial.org
// SPDX-License-Identifier: AGPL-3.0-or-later
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.

package dereferencing

import (
"context"
"fmt"
"io"
"net/url"

"github.com/superseriousbusiness/gotosocial/internal/media"
)

func (d *Dereferencer) GetRemoteMedia(ctx context.Context, requestingUsername string, accountID string, remoteURL string, ai *media.AdditionalMediaInfo) (*media.ProcessingMedia, error) {
if accountID == "" {
return nil, fmt.Errorf("GetRemoteMedia: account ID was empty")
}

t, err := d.transportController.NewTransportForUsername(ctx, requestingUsername)
if err != nil {
return nil, fmt.Errorf("GetRemoteMedia: error creating transport: %s", err)
}

derefURI, err := url.Parse(remoteURL)
if err != nil {
return nil, fmt.Errorf("GetRemoteMedia: error parsing url: %s", err)
}

dataFunc := func(innerCtx context.Context) (io.ReadCloser, int64, error) {
return t.DereferenceMedia(innerCtx, derefURI)
}

processingMedia, err := d.mediaManager.ProcessMedia(ctx, dataFunc, accountID, ai)
if err != nil {
return nil, fmt.Errorf("GetRemoteMedia: error processing attachment: %s", err)
}

return processingMedia, nil
}
@@ -1,161 +0,0 @@ (file deleted)
// GoToSocial
// Copyright (C) GoToSocial Authors admin@gotosocial.org
// SPDX-License-Identifier: AGPL-3.0-or-later
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.

package dereferencing_test

import (
"context"
"testing"
"time"

"github.com/stretchr/testify/suite"
"github.com/superseriousbusiness/gotosocial/internal/gtsmodel"
"github.com/superseriousbusiness/gotosocial/internal/media"
)

type AttachmentTestSuite struct {
DereferencerStandardTestSuite
}

func (suite *AttachmentTestSuite) TestDereferenceAttachmentBlocking() {
ctx := context.Background()

fetchingAccount := suite.testAccounts["local_account_1"]

attachmentOwner := "01FENS9F666SEQ6TYQWEEY78GM"
attachmentStatus := "01FENS9NTTVNEX1YZV7GB63MT8"
attachmentContentType := "image/jpeg"
attachmentURL := "https://s3-us-west-2.amazonaws.com/plushcity/media_attachments/files/106/867/380/219/163/828/original/88e8758c5f011439.jpg"
attachmentDescription := "It's a cute plushie."
attachmentBlurhash := "LtQ9yKi__4%g%MRjWCt7%hozM_az"

media, err := suite.dereferencer.GetRemoteMedia(ctx, fetchingAccount.Username, attachmentOwner, attachmentURL, &media.AdditionalMediaInfo{
StatusID: &attachmentStatus,
RemoteURL: &attachmentURL,
Description: &attachmentDescription,
Blurhash: &attachmentBlurhash,
})
suite.NoError(err)

// make a blocking call to load the attachment from the in-process media
attachment, err := media.LoadAttachment(ctx)
suite.NoError(err)

suite.NotNil(attachment)

suite.Equal(attachmentOwner, attachment.AccountID)
suite.Equal(attachmentStatus, attachment.StatusID)
suite.Equal(attachmentURL, attachment.RemoteURL)
suite.NotEmpty(attachment.URL)
suite.NotEmpty(attachment.Blurhash)
suite.NotEmpty(attachment.ID)
suite.NotEmpty(attachment.CreatedAt)
suite.NotEmpty(attachment.UpdatedAt)
suite.EqualValues(1.3365462, attachment.FileMeta.Original.Aspect)
suite.Equal(2071680, attachment.FileMeta.Original.Size)
suite.Equal(1245, attachment.FileMeta.Original.Height)
suite.Equal(1664, attachment.FileMeta.Original.Width)
suite.Equal(attachmentBlurhash, attachment.Blurhash)
suite.Equal(gtsmodel.ProcessingStatusProcessed, attachment.Processing)
suite.NotEmpty(attachment.File.Path)
suite.Equal(attachmentContentType, attachment.File.ContentType)
suite.Equal(attachmentDescription, attachment.Description)

suite.NotEmpty(attachment.Thumbnail.Path)
suite.NotEmpty(attachment.Type)

// attachment should also now be in the database
dbAttachment, err := suite.db.GetAttachmentByID(context.Background(), attachment.ID)
suite.NoError(err)
suite.NotNil(dbAttachment)

suite.Equal(attachmentOwner, dbAttachment.AccountID)
suite.Equal(attachmentStatus, dbAttachment.StatusID)
suite.Equal(attachmentURL, dbAttachment.RemoteURL)
suite.NotEmpty(dbAttachment.URL)
suite.NotEmpty(dbAttachment.Blurhash)
suite.NotEmpty(dbAttachment.ID)
suite.NotEmpty(dbAttachment.CreatedAt)
suite.NotEmpty(dbAttachment.UpdatedAt)
suite.EqualValues(1.3365462, dbAttachment.FileMeta.Original.Aspect)
suite.Equal(2071680, dbAttachment.FileMeta.Original.Size)
suite.Equal(1245, dbAttachment.FileMeta.Original.Height)
suite.Equal(1664, dbAttachment.FileMeta.Original.Width)
suite.Equal(attachmentBlurhash, dbAttachment.Blurhash)
suite.Equal(gtsmodel.ProcessingStatusProcessed, dbAttachment.Processing)
suite.NotEmpty(dbAttachment.File.Path)
suite.Equal(attachmentContentType, dbAttachment.File.ContentType)
suite.Equal(attachmentDescription, dbAttachment.Description)

suite.NotEmpty(dbAttachment.Thumbnail.Path)
suite.NotEmpty(dbAttachment.Type)
}

func (suite *AttachmentTestSuite) TestDereferenceAttachmentAsync() {
ctx := context.Background()

fetchingAccount := suite.testAccounts["local_account_1"]

attachmentOwner := "01FENS9F666SEQ6TYQWEEY78GM"
attachmentStatus := "01FENS9NTTVNEX1YZV7GB63MT8"
attachmentContentType := "image/jpeg"
attachmentURL := "https://s3-us-west-2.amazonaws.com/plushcity/media_attachments/files/106/867/380/219/163/828/original/88e8758c5f011439.jpg"
attachmentDescription := "It's a cute plushie."
attachmentBlurhash := "LtQ9yKi__4%g%MRjWCt7%hozM_az"

processingMedia, err := suite.dereferencer.GetRemoteMedia(ctx, fetchingAccount.Username, attachmentOwner, attachmentURL, &media.AdditionalMediaInfo{
StatusID: &attachmentStatus,
RemoteURL: &attachmentURL,
Description: &attachmentDescription,
Blurhash: &attachmentBlurhash,
})
suite.NoError(err)
attachmentID := processingMedia.AttachmentID()

time.Sleep(time.Second * 3)

// now get the attachment from the database
attachment, err := suite.db.GetAttachmentByID(ctx, attachmentID)
suite.NoError(err)

suite.NotNil(attachment)

suite.Equal(attachmentOwner, attachment.AccountID)
suite.Equal(attachmentStatus, attachment.StatusID)
suite.Equal(attachmentURL, attachment.RemoteURL)
suite.NotEmpty(attachment.URL)
suite.NotEmpty(attachment.Blurhash)
suite.NotEmpty(attachment.ID)
suite.NotEmpty(attachment.CreatedAt)
suite.NotEmpty(attachment.UpdatedAt)
suite.EqualValues(1.3365462, attachment.FileMeta.Original.Aspect)
suite.Equal(2071680, attachment.FileMeta.Original.Size)
suite.Equal(1245, attachment.FileMeta.Original.Height)
suite.Equal(1664, attachment.FileMeta.Original.Width)
suite.Equal(attachmentBlurhash, attachment.Blurhash)
suite.Equal(gtsmodel.ProcessingStatusProcessed, attachment.Processing)
suite.NotEmpty(attachment.File.Path)
suite.Equal(attachmentContentType, attachment.File.ContentType)
suite.Equal(attachmentDescription, attachment.Description)

suite.NotEmpty(attachment.Thumbnail.Path)
suite.NotEmpty(attachment.Type)
}

func TestAttachmentTestSuite(t *testing.T) {
suite.Run(t, new(AttachmentTestSuite))
}
@@ -789,7 +789,7 @@ func (d *Dereferencer) fetchStatusAttachments(ctx context.Context, tsport transp
  for i := range status.Attachments {
  attachment := status.Attachments[i]

- // Look for existing media attachment with remoet URL first.
+ // Look for existing media attachment with remote URL first.
  existing, ok := existing.GetAttachmentByRemoteURL(attachment.RemoteURL)
  if ok && existing.ID != "" && *existing.Cached {
  status.Attachments[i] = existing
@@ -804,25 +804,33 @@ func (d *Dereferencer) fetchStatusAttachments(ctx context.Context, tsport transp
continue
}

// Start pre-processing remote media at remote URL.
processing, err := d.mediaManager.PreProcessMedia(ctx, func(ctx context.Context) (io.ReadCloser, int64, error) {
data := func(ctx context.Context) (io.ReadCloser, int64, error) {
return tsport.DereferenceMedia(ctx, remoteURL)
}, status.AccountID, &media.AdditionalMediaInfo{
}

ai := &media.AdditionalMediaInfo{
StatusID: &status.ID,
RemoteURL: &attachment.RemoteURL,
Description: &attachment.Description,
Blurhash: &attachment.Blurhash,
})
if err != nil {
log.Errorf(ctx, "error processing attachment: %v", err)
continue
}

// Start pre-processing remote media at remote URL.
processing := d.mediaManager.PreProcessMedia(data, status.AccountID, ai)

// Force attachment loading *right now*.
attachment, err = processing.LoadAttachment(ctx)
if err != nil {
log.Errorf(ctx, "error loading attachment: %v", err)
continue
if attachment == nil {
// Totally failed to load;
// bail on this attachment.
log.Errorf(ctx, "error loading attachment: %v", err)
continue
}

// Partially loaded. Keep as
// placeholder and try again later.
log.Warnf(ctx, "partially loaded attachment: %v", err)
}

// Set the *new* attachment and ID.
@@ -832,8 +840,7 @@ func (d *Dereferencer) fetchStatusAttachments(ctx context.Context, tsport transp

  for i := 0; i < len(status.AttachmentIDs); {
  if status.AttachmentIDs[i] == "" {
- // This is a failed attachment population, this may
- // be due to us not currently supporting a media type.
+ // Remove totally failed attachment populations
  copy(status.Attachments[i:], status.Attachments[i+1:])
  copy(status.AttachmentIDs[i:], status.AttachmentIDs[i+1:])
  status.Attachments = status.Attachments[:len(status.Attachments)-1]
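The removal above uses Go's standard copy-and-truncate idiom for deleting an element from a slice in place. A generic sketch of the idiom (illustrative only, not code from this commit):

// removeAt deletes s[i] in place, preserving the order of the remaining elements.
func removeAt[T any](s []T, i int) []T {
	copy(s[i:], s[i+1:])
	return s[:len(s)-1]
}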
@@ -43,9 +43,10 @@ const (
 // Unretrievable checks error for a stored "unretrievable" flag.
 //
 // Unretrievable indicates that a call to retrieve a resource
- // (account, status, etc) could not be fulfilled, either because
- // it was not found locally, or because some prerequisite remote
- // resource call failed, making it impossible to return the item.
+ // (account, status, attachment, etc) could not be fulfilled,
+ // either because it was not found locally, or because some
+ // prerequisite remote resource call failed, making it impossible
+ // to return the item.
 func Unretrievable(err error) bool {
  _, ok := errors.Value(err, unrtrvableKey).(struct{})
  return ok
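Callers use this flag to decide whether a failed dereference is a permanent miss rather than a transient error. A hedged usage sketch (the surrounding call and variable names are hypothetical):

// Hypothetical caller: treat unretrievable remote resources as gone.
if err != nil {
	if gtserror.Unretrievable(err) {
		// Not found locally, and the remote call could not be
		// fulfilled; drop the reference instead of retrying.
		return nil, nil
	}
	return nil, err
}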
@@ -30,7 +30,7 @@ type MediaAttachment struct {
  StatusID string `bun:"type:CHAR(26),nullzero"` // ID of the status to which this is attached
  URL string `bun:",nullzero"` // Where can the attachment be retrieved on *this* server
  RemoteURL string `bun:",nullzero"` // Where can the attachment be retrieved on a remote server (empty for local media)
- Type FileType `bun:",nullzero,notnull"` // Type of file (image/gifv/audio/video)
+ Type FileType `bun:",nullzero,notnull"` // Type of file (image/gifv/audio/video/unknown)
  FileMeta FileMeta `bun:",embed:,nullzero,notnull"` // Metadata about the file
  AccountID string `bun:"type:CHAR(26),nullzero,notnull"` // To which account does this attachment belong
  Description string `bun:""` // Description of the attachment (for screenreaders)
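The FileType values referred to in that comment form a small enum in gtsmodel; only FileTypeUnknown appears verbatim elsewhere in this diff, so the following is an assumed sketch rather than the real definition:

// Sketch only; see internal/gtsmodel for the actual FileType definition.
type FileType int

const (
	FileTypeImage   FileType = iota // assumed
	FileTypeGifv                    // assumed
	FileTypeAudio                   // assumed
	FileTypeVideo                   // assumed
	FileTypeUnknown                 // used for placeholder attachments in this commit
)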
@@ -20,7 +20,6 @@ package media
import (
"context"
"errors"
"fmt"
"io"
"time"
|
@ -32,6 +31,7 @@ import (
|
|||
"github.com/superseriousbusiness/gotosocial/internal/log"
|
||||
"github.com/superseriousbusiness/gotosocial/internal/state"
|
||||
"github.com/superseriousbusiness/gotosocial/internal/uris"
|
||||
"github.com/superseriousbusiness/gotosocial/internal/util"
|
||||
)
|
||||
|
||||
var SupportedMIMETypes = []string{
|
||||
|
@@ -57,52 +57,67 @@ func NewManager(state *state.State) *Manager {
return m
}

// PreProcessMedia begins the process of decoding and storing the given data as an attachment.
// It will return a pointer to a ProcessingMedia struct upon which further actions can be performed, such as getting
// the finished media, thumbnail, attachment, etc.
// PreProcessMedia begins the process of decoding
// and storing the given data as an attachment.
// It will return a pointer to a ProcessingMedia
// struct upon which further actions can be performed,
// such as getting the finished media, thumbnail,
// attachment, etc.
//
// data should be a function that the media manager can call to return a reader containing the media data.
// - data: a function that the media manager can call
// to return a reader containing the media data.
// - accountID: the account that the media belongs to.
// - ai: optional and can be nil. Any additional information
// about the attachment provided will be put in the database.
//
// accountID should be the account that the media belongs to.
//
// ai is optional and can be nil. Any additional information about the attachment provided will be put in the database.
//
// Note: unlike ProcessMedia, this will NOT queue the media to be asynchronously processed.
func (m *Manager) PreProcessMedia(ctx context.Context, data DataFunc, accountID string, ai *AdditionalMediaInfo) (*ProcessingMedia, error) {
id, err := id.NewRandomULID()
if err != nil {
return nil, err
}

avatar := false
header := false
cached := false
// Note: unlike ProcessMedia, this will NOT
// queue the media to be asynchronously processed.
func (m *Manager) PreProcessMedia(
data DataFunc,
accountID string,
ai *AdditionalMediaInfo,
) *ProcessingMedia {
// Populate initial fields on the new media,
// leaving out fields with values we don't know
// yet. These will be overwritten as we go.
now := time.Now()

// populate initial fields on the media attachment -- some of these will be overwritten as we proceed
attachment := &gtsmodel.MediaAttachment{
ID: id,
CreatedAt: now,
UpdatedAt: now,
StatusID: "",
URL: "", // we don't know yet because it depends on the uncalled DataFunc
RemoteURL: "",
Type: gtsmodel.FileTypeUnknown, // we don't know yet because it depends on the uncalled DataFunc
FileMeta: gtsmodel.FileMeta{},
AccountID: accountID,
Description: "",
ScheduledStatusID: "",
Blurhash: "",
Processing: gtsmodel.ProcessingStatusReceived,
File: gtsmodel.File{UpdatedAt: now},
Thumbnail: gtsmodel.Thumbnail{UpdatedAt: now},
Avatar: &avatar,
Header: &header,
Cached: &cached,
ID: id.NewULID(),
CreatedAt: now,
UpdatedAt: now,
Type: gtsmodel.FileTypeUnknown,
FileMeta: gtsmodel.FileMeta{},
AccountID: accountID,
Processing: gtsmodel.ProcessingStatusReceived,
File: gtsmodel.File{
UpdatedAt: now,
ContentType: "application/octet-stream",
},
Thumbnail: gtsmodel.Thumbnail{UpdatedAt: now},
Avatar: util.Ptr(false),
Header: util.Ptr(false),
Cached: util.Ptr(false),
}

// check if we have additional info to add to the attachment,
// and overwrite some of the attachment fields if so
attachment.URL = uris.URIForAttachment(
accountID,
string(TypeAttachment),
string(SizeOriginal),
attachment.ID,
"unknown",
)

attachment.File.Path = uris.StoragePathForAttachment(
accountID,
string(TypeAttachment),
string(SizeOriginal),
attachment.ID,
"unknown",
)

// Check if we were provided additional info
// to add to the attachment, and overwrite
// some of the attachment fields if so.
if ai != nil {
if ai.CreatedAt != nil {
attachment.CreatedAt = *ai.CreatedAt
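With the new signature, PreProcessMedia no longer returns an error: callers receive a ProcessingMedia immediately and choose whether to load it synchronously. A usage sketch assembled from the call sites elsewhere in this diff (variable names are illustrative):

// Build a DataFunc the manager can call to fetch the raw bytes.
data := func(ctx context.Context) (io.ReadCloser, int64, error) {
	return tsport.DereferenceMedia(ctx, remoteURI)
}

// Kick off pre-processing; nothing is queued asynchronously here.
processing := mediaManager.PreProcessMedia(data, accountID, &media.AdditionalMediaInfo{
	RemoteURL: &remoteURL,
})

// Block until the attachment is loaded (or partially loaded).
attachment, err := processing.LoadAttachment(ctx)
if err != nil && attachment == nil {
	return err // total failure: nothing usable was stored
}
// On partial failure, attachment is a placeholder with Type unknown.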
@@ -151,14 +166,21 @@ func (m *Manager) PreProcessMedia(ctx context.Context, data DataFunc, accountID
mgr: m,
}

return processingMedia, nil
return processingMedia
}

// PreProcessMediaRecache refetches, reprocesses, and recaches an existing attachment that has been uncached via cleaner pruning.
// PreProcessMediaRecache refetches, reprocesses,
// and recaches an existing attachment that has
// been uncached via cleaner pruning.
//
// Note: unlike ProcessMedia, this will NOT queue the media to be asychronously processed.
func (m *Manager) PreProcessMediaRecache(ctx context.Context, data DataFunc, attachmentID string) (*ProcessingMedia, error) {
// get the existing attachment from database.
// Note: unlike ProcessMedia, this will NOT queue
// the media to be asychronously processed.
func (m *Manager) PreProcessMediaRecache(
ctx context.Context,
data DataFunc,
attachmentID string,
) (*ProcessingMedia, error) {
// Get the existing attachment from database.
attachment, err := m.state.DB.GetAttachmentByID(ctx, attachmentID)
if err != nil {
return nil, err
@@ -167,43 +189,39 @@ func (m *Manager) PreProcessMediaRecache(ctx context.Context, data DataFunc, att
processingMedia := &ProcessingMedia{
media: attachment,
dataFn: data,
recache: true, // indicate it's a recache
recache: true, // Indicate it's a recache.
mgr: m,
}

return processingMedia, nil
}

// ProcessMedia will call PreProcessMedia, followed by queuing the media to be processing in the media worker queue.
func (m *Manager) ProcessMedia(ctx context.Context, data DataFunc, accountID string, ai *AdditionalMediaInfo) (*ProcessingMedia, error) {
// Create a new processing media object for this media request.
media, err := m.PreProcessMedia(ctx, data, accountID, ai)
if err != nil {
return nil, err
}

// Attempt to add this media processing item to the worker queue.
_ = m.state.Workers.Media.MustEnqueueCtx(ctx, media.Process)

return media, nil
}

// PreProcessEmoji begins the process of decoding and storing the given data as an emoji.
// It will return a pointer to a ProcessingEmoji struct upon which further actions can be performed, such as getting
// the finished media, thumbnail, attachment, etc.
// PreProcessEmoji begins the process of decoding and storing
// the given data as an emoji. It will return a pointer to a
// ProcessingEmoji struct upon which further actions can be
// performed, such as getting the finished media, thumbnail,
// attachment, etc.
//
// data should be a function that the media manager can call to return a reader containing the emoji data.
// - data: function that the media manager can call
// to return a reader containing the emoji data.
// - shortcode: the emoji shortcode without the ':'s around it.
// - emojiID: database ID that should be used to store the emoji.
// - uri: ActivityPub URI/ID of the emoji.
// - ai: optional and can be nil. Any additional information
// about the emoji provided will be put in the database.
// - refresh: refetch/refresh the emoji.
//
// shortcode should be the emoji shortcode without the ':'s around it.
//
// id is the database ID that should be used to store the emoji.
//
// uri is the ActivityPub URI/ID of the emoji.
//
// ai is optional and can be nil. Any additional information about the emoji provided will be put in the database.
//
// Note: unlike ProcessEmoji, this will NOT queue the emoji to be asynchronously processed.
func (m *Manager) PreProcessEmoji(ctx context.Context, data DataFunc, shortcode string, emojiID string, uri string, ai *AdditionalEmojiInfo, refresh bool) (*ProcessingEmoji, error) {
// Note: unlike ProcessEmoji, this will NOT queue
// the emoji to be asynchronously processed.
func (m *Manager) PreProcessEmoji(
ctx context.Context,
data DataFunc,
shortcode string,
emojiID string,
uri string,
ai *AdditionalEmojiInfo,
refresh bool,
) (*ProcessingEmoji, error) {
var (
newPathID string
emoji *gtsmodel.Emoji
@@ -217,18 +235,22 @@ func (m *Manager) PreProcessEmoji(ctx context.Context, data DataFunc, shortcode
}

if refresh {
// Look for existing emoji by given ID.
// Existing emoji!

emoji, err = m.state.DB.GetEmojiByID(ctx, emojiID)
if err != nil {
return nil, gtserror.Newf("error fetching emoji to refresh from the db: %s", err)
err = gtserror.Newf("error fetching emoji to refresh from the db: %w", err)
return nil, err
}

// if this is a refresh, we will end up with new images
// stored for this emoji, so we can use an io.Closer callback
// to perform clean up of the old images from storage
// Since this is a refresh, we will end up with
// new images stored for this emoji, so we should
// use an io.Closer callback to perform clean up
// of the original images from storage.
originalData := data
originalImagePath := emoji.ImagePath
originalImageStaticPath := emoji.ImageStaticPath

data = func(ctx context.Context) (io.ReadCloser, int64, error) {
// Call original data func.
rc, sz, err := originalData(ctx)
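The refresh path wraps the original DataFunc so that, once the new bytes have been read and closed, the old emoji images can be removed from storage. A simplified sketch of attaching an extra close callback to a ReadCloser, which is roughly what the iotools.ReadCloser helper used above appears to do:

// callbackReadCloser runs fn after the wrapped ReadCloser has been closed.
type callbackReadCloser struct {
	io.ReadCloser
	fn func() error
}

func (c callbackReadCloser) Close() error {
	err := c.ReadCloser.Close()
	if fnErr := c.fn(); err == nil {
		err = fnErr
	}
	return err
}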
@@ -251,49 +273,81 @@ func (m *Manager) PreProcessEmoji(ctx context.Context, data DataFunc, shortcode
return iotools.ReadCloser(rc, c), sz, nil
}

// Reuse existing shortcode and URI -
// these don't change when we refresh.
emoji.Shortcode = shortcode
emoji.URI = uri

// Use a new ID to create a new path
// for the new images, to get around
// needing to do cache invalidation.
newPathID, err = id.NewRandomULID()
if err != nil {
return nil, gtserror.Newf("error generating alternateID for emoji refresh: %s", err)
}

// store + serve static image at new path ID
emoji.ImageStaticURL = uris.GenerateURIForAttachment(instanceAcc.ID, string(TypeEmoji), string(SizeStatic), newPathID, mimePng)
emoji.ImageStaticPath = fmt.Sprintf("%s/%s/%s/%s.%s", instanceAcc.ID, TypeEmoji, SizeStatic, newPathID, mimePng)
emoji.ImageStaticURL = uris.URIForAttachment(
instanceAcc.ID,
string(TypeEmoji),
string(SizeStatic),
newPathID,
// All static emojis
// are encoded as png.
mimePng,
)

emoji.Shortcode = shortcode
emoji.URI = uri
emoji.ImageStaticPath = uris.StoragePathForAttachment(
instanceAcc.ID,
string(TypeEmoji),
string(SizeStatic),
newPathID,
// All static emojis
// are encoded as png.
mimePng,
)
} else {
disabled := false
visibleInPicker := true
// New emoji!

// populate initial fields on the emoji -- some of these will be overwritten as we proceed
imageStaticURL := uris.URIForAttachment(
instanceAcc.ID,
string(TypeEmoji),
string(SizeStatic),
emojiID,
// All static emojis
// are encoded as png.
mimePng,
)

imageStaticPath := uris.StoragePathForAttachment(
instanceAcc.ID,
string(TypeEmoji),
string(SizeStatic),
emojiID,
// All static emojis
// are encoded as png.
mimePng,
)

// Populate initial fields on the new emoji,
// leaving out fields with values we don't know
// yet. These will be overwritten as we go.
emoji = &gtsmodel.Emoji{
ID: emojiID,
CreatedAt: now,
UpdatedAt: now,
Shortcode: shortcode,
Domain: "", // assume our own domain unless told otherwise
ImageRemoteURL: "",
ImageStaticRemoteURL: "",
ImageURL: "", // we don't know yet
ImageStaticURL: uris.GenerateURIForAttachment(instanceAcc.ID, string(TypeEmoji), string(SizeStatic), emojiID, mimePng), // all static emojis are encoded as png
ImagePath: "", // we don't know yet
ImageStaticPath: fmt.Sprintf("%s/%s/%s/%s.%s", instanceAcc.ID, TypeEmoji, SizeStatic, emojiID, mimePng), // all static emojis are encoded as png
ImageContentType: "", // we don't know yet
ImageStaticContentType: mimeImagePng, // all static emojis are encoded as png
ImageFileSize: 0,
ImageStaticFileSize: 0,
Disabled: &disabled,
ImageStaticURL: imageStaticURL,
ImageStaticPath: imageStaticPath,
ImageStaticContentType: mimeImagePng,
ImageUpdatedAt: now,
Disabled: util.Ptr(false),
URI: uri,
VisibleInPicker: &visibleInPicker,
CategoryID: "",
VisibleInPicker: util.Ptr(true),
}
}

emoji.ImageUpdatedAt = now
emoji.UpdatedAt = now

// check if we have additional info to add to the emoji,
// and overwrite some of the emoji fields if so
// Check if we have additional info to add to the emoji,
// and overwrite some of the emoji fields if so.
if ai != nil {
if ai.CreatedAt != nil {
emoji.CreatedAt = *ai.CreatedAt
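util.Ptr replaces the older pattern of declaring a throwaway local (or an immediately-invoked func literal) just to take a pointer to a literal value. It is presumably a tiny generic helper along these lines:

// Ptr returns a pointer to the given value.
func Ptr[T any](v T) *T {
	return &v
}

// Usage: Avatar: util.Ptr(false) instead of avatar := false; Avatar: &avatar.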
@@ -335,11 +389,17 @@ func (m *Manager) PreProcessEmoji(ctx context.Context, data DataFunc, shortcode
return processingEmoji, nil
}

// PreProcessEmojiRecache refetches, reprocesses, and recaches an existing emoji that has been uncached via cleaner pruning.
// PreProcessEmojiRecache refetches, reprocesses, and recaches
// an existing emoji that has been uncached via cleaner pruning.
//
// Note: unlike ProcessEmoji, this will NOT queue the emoji to be asychronously processed.
func (m *Manager) PreProcessEmojiRecache(ctx context.Context, data DataFunc, emojiID string) (*ProcessingEmoji, error) {
// get the existing emoji from the database.
// Note: unlike ProcessEmoji, this will NOT queue the emoji to
// be asychronously processed.
func (m *Manager) PreProcessEmojiRecache(
ctx context.Context,
data DataFunc,
emojiID string,
) (*ProcessingEmoji, error) {
// Get the existing emoji from the database.
emoji, err := m.state.DB.GetEmojiByID(ctx, emojiID)
if err != nil {
return nil, err
@@ -348,15 +408,24 @@ func (m *Manager) PreProcessEmojiRecache(ctx context.Context, data DataFunc, emo
processingEmoji := &ProcessingEmoji{
emoji: emoji,
dataFn: data,
existing: true, // inidcate recache
existing: true, // Indicate recache.
mgr: m,
}

return processingEmoji, nil
}

// ProcessEmoji will call PreProcessEmoji, followed by queuing the emoji to be processing in the emoji worker queue.
func (m *Manager) ProcessEmoji(ctx context.Context, data DataFunc, shortcode string, id string, uri string, ai *AdditionalEmojiInfo, refresh bool) (*ProcessingEmoji, error) {
// ProcessEmoji will call PreProcessEmoji, followed
// by queuing the emoji in the emoji worker queue.
func (m *Manager) ProcessEmoji(
ctx context.Context,
data DataFunc,
shortcode string,
id string,
uri string,
ai *AdditionalEmojiInfo,
refresh bool,
) (*ProcessingEmoji, error) {
// Create a new processing emoji object for this emoji request.
emoji, err := m.PreProcessEmoji(ctx, data, shortcode, id, uri, ai, refresh)
if err != nil {
@@ -33,7 +33,6 @@ import (
"github.com/superseriousbusiness/gotosocial/internal/media"
"github.com/superseriousbusiness/gotosocial/internal/state"
gtsstorage "github.com/superseriousbusiness/gotosocial/internal/storage"
"github.com/superseriousbusiness/gotosocial/testrig"
)

type ManagerTestSuite struct {
@@ -319,8 +318,7 @@ func (suite *ManagerTestSuite) TestSimpleJpegProcessBlocking() {
  accountID := "01FS1X72SK9ZPW0J1QQ68BD264"

  // process the media with no additional info provided
- processingMedia, err := suite.manager.ProcessMedia(ctx, data, accountID, nil)
- suite.NoError(err)
+ processingMedia := suite.manager.PreProcessMedia(data, accountID, nil)
  // fetch the attachment id from the processing media
  attachmentID := processingMedia.AttachmentID()
@@ -376,6 +374,131 @@ func (suite *ManagerTestSuite) TestSimpleJpegProcessBlocking() {
suite.Equal(processedThumbnailBytesExpected, processedThumbnailBytes)
}

func (suite *ManagerTestSuite) TestSimpleJpegProcessPartial() {
ctx := context.Background()

data := func(_ context.Context) (io.ReadCloser, int64, error) {
// load bytes from a test image
b, err := os.ReadFile("./test/test-jpeg.jpg")
if err != nil {
panic(err)
}

// Fuck up the bytes a bit by cutting
// off the second half, tee hee!
b = b[:len(b)/2]

return io.NopCloser(bytes.NewBuffer(b)), int64(len(b)), nil
}

accountID := "01FS1X72SK9ZPW0J1QQ68BD264"

// process the media with no additional info provided
processingMedia := suite.manager.PreProcessMedia(data, accountID, nil)

// fetch the attachment id from the processing media
attachmentID := processingMedia.AttachmentID()

// do a blocking call to fetch the attachment
attachment, err := processingMedia.LoadAttachment(ctx)

// Since we're cutting off the byte stream
// halfway through, we should get an error here.
suite.EqualError(err, "finish: error decoding image: unexpected EOF")
suite.NotNil(attachment)

// make sure it's got the stuff set on it that we expect
// the attachment ID and accountID we expect
suite.Equal(attachmentID, attachment.ID)
suite.Equal(accountID, attachment.AccountID)

// file meta should be correctly derived from the image
suite.Zero(attachment.FileMeta)
suite.Equal("image/jpeg", attachment.File.ContentType)
suite.Equal("image/jpeg", attachment.Thumbnail.ContentType)
suite.Empty(attachment.Blurhash)

// now make sure the attachment is in the database
dbAttachment, err := suite.db.GetAttachmentByID(ctx, attachmentID)
suite.NoError(err)
suite.NotNil(dbAttachment)

// Attachment should have type unknown
suite.Equal(gtsmodel.FileTypeUnknown, dbAttachment.Type)

// Nothing should be in storage for this attachment.
stored, err := suite.storage.Has(ctx, attachment.File.Path)
if err != nil {
suite.FailNow(err.Error())
}
suite.False(stored)

stored, err = suite.storage.Has(ctx, attachment.Thumbnail.Path)
if err != nil {
suite.FailNow(err.Error())
}
suite.False(stored)
}

func (suite *ManagerTestSuite) TestPDFProcess() {
ctx := context.Background()

data := func(_ context.Context) (io.ReadCloser, int64, error) {
// load bytes from Frantz
b, err := os.ReadFile("./test/Frantz-Fanon-The-Wretched-of-the-Earth-1965.pdf")
if err != nil {
panic(err)
}

return io.NopCloser(bytes.NewBuffer(b)), int64(len(b)), nil
}

accountID := "01FS1X72SK9ZPW0J1QQ68BD264"

// process the media with no additional info provided
processingMedia := suite.manager.PreProcessMedia(data, accountID, nil)

// fetch the attachment id from the processing media
attachmentID := processingMedia.AttachmentID()

// do a blocking call to fetch the attachment
attachment, err := processingMedia.LoadAttachment(ctx)
suite.NoError(err)
suite.NotNil(attachment)

// make sure it's got the stuff set on it that we expect
// the attachment ID and accountID we expect
suite.Equal(attachmentID, attachment.ID)
suite.Equal(accountID, attachment.AccountID)

// file meta should be correctly derived from the image
suite.Zero(attachment.FileMeta)
suite.Equal("application/pdf", attachment.File.ContentType)
suite.Equal("image/jpeg", attachment.Thumbnail.ContentType)
suite.Empty(attachment.Blurhash)

// now make sure the attachment is in the database
dbAttachment, err := suite.db.GetAttachmentByID(ctx, attachmentID)
suite.NoError(err)
suite.NotNil(dbAttachment)

// Attachment should have type unknown
suite.Equal(gtsmodel.FileTypeUnknown, dbAttachment.Type)

// Nothing should be in storage for this attachment.
stored, err := suite.storage.Has(ctx, attachment.File.Path)
if err != nil {
suite.FailNow(err.Error())
}
suite.False(stored)

stored, err = suite.storage.Has(ctx, attachment.Thumbnail.Path)
if err != nil {
suite.FailNow(err.Error())
}
suite.False(stored)
}

func (suite *ManagerTestSuite) TestSlothVineProcessBlocking() {
ctx := context.Background()
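The new TestSimpleJpegProcessPartial and TestPDFProcess cases pin down the placeholder behaviour: LoadAttachment can return a non-nil attachment together with an error. A short sketch of how a caller distinguishes the outcomes, mirroring the dereferencer logic earlier in this diff (names are illustrative):

attachment, err := processing.LoadAttachment(ctx)
switch {
case err == nil:
	// Fully processed and stored.
case attachment != nil:
	// Partially loaded: keep the placeholder (Type unknown, nothing
	// in storage) and try fetching the media again later.
	log.Warnf(ctx, "partially loaded attachment: %v", err)
default:
	// Totally failed: nothing usable came back.
	log.Errorf(ctx, "error loading attachment: %v", err)
}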
@@ -391,8 +514,7 @@ func (suite *ManagerTestSuite) TestSlothVineProcessBlocking() {
  accountID := "01FS1X72SK9ZPW0J1QQ68BD264"

  // process the media with no additional info provided
- processingMedia, err := suite.manager.ProcessMedia(ctx, data, accountID, nil)
- suite.NoError(err)
+ processingMedia := suite.manager.PreProcessMedia(data, accountID, nil)
  // fetch the attachment id from the processing media
  attachmentID := processingMedia.AttachmentID()
@@ -467,8 +589,7 @@ func (suite *ManagerTestSuite) TestLongerMp4ProcessBlocking() {
  accountID := "01FS1X72SK9ZPW0J1QQ68BD264"

  // process the media with no additional info provided
- processingMedia, err := suite.manager.ProcessMedia(ctx, data, accountID, nil)
- suite.NoError(err)
+ processingMedia := suite.manager.PreProcessMedia(data, accountID, nil)
  // fetch the attachment id from the processing media
  attachmentID := processingMedia.AttachmentID()
@@ -543,8 +664,7 @@ func (suite *ManagerTestSuite) TestBirdnestMp4ProcessBlocking() {
  accountID := "01FS1X72SK9ZPW0J1QQ68BD264"

  // process the media with no additional info provided
- processingMedia, err := suite.manager.ProcessMedia(ctx, data, accountID, nil)
- suite.NoError(err)
+ processingMedia := suite.manager.PreProcessMedia(data, accountID, nil)
  // fetch the attachment id from the processing media
  attachmentID := processingMedia.AttachmentID()
@@ -621,13 +741,16 @@ func (suite *ManagerTestSuite) TestNotAnMp4ProcessBlocking() {
  accountID := "01FS1X72SK9ZPW0J1QQ68BD264"

  // pre processing should go fine but...
- processingMedia, err := suite.manager.ProcessMedia(ctx, data, accountID, nil)
- suite.NoError(err)
+ processingMedia := suite.manager.PreProcessMedia(data, accountID, nil)

  // we should get an error while loading
  attachment, err := processingMedia.LoadAttachment(ctx)
  suite.EqualError(err, "finish: error decoding video: error determining video metadata: [width height framerate]")
- suite.Nil(attachment)
+
+ // partial attachment should be
+ // returned, with 'unknown' type.
+ suite.NotNil(attachment)
+ suite.Equal(gtsmodel.FileTypeUnknown, attachment.Type)
 }

 func (suite *ManagerTestSuite) TestSimpleJpegProcessBlockingNoContentLengthGiven() {
@@ -646,8 +769,7 @@ func (suite *ManagerTestSuite) TestSimpleJpegProcessBlockingNoContentLengthGiven
  accountID := "01FS1X72SK9ZPW0J1QQ68BD264"

  // process the media with no additional info provided
- processingMedia, err := suite.manager.ProcessMedia(ctx, data, accountID, nil)
- suite.NoError(err)
+ processingMedia := suite.manager.PreProcessMedia(data, accountID, nil)
  // fetch the attachment id from the processing media
  attachmentID := processingMedia.AttachmentID()
@@ -719,8 +841,7 @@ func (suite *ManagerTestSuite) TestSimpleJpegProcessBlockingReadCloser() {
  accountID := "01FS1X72SK9ZPW0J1QQ68BD264"

  // process the media with no additional info provided
- processingMedia, err := suite.manager.ProcessMedia(ctx, data, accountID, nil)
- suite.NoError(err)
+ processingMedia := suite.manager.PreProcessMedia(data, accountID, nil)
  // fetch the attachment id from the processing media
  attachmentID := processingMedia.AttachmentID()
@@ -791,8 +912,7 @@ func (suite *ManagerTestSuite) TestPngNoAlphaChannelProcessBlocking() {
  accountID := "01FS1X72SK9ZPW0J1QQ68BD264"

  // process the media with no additional info provided
- processingMedia, err := suite.manager.ProcessMedia(ctx, data, accountID, nil)
- suite.NoError(err)
+ processingMedia := suite.manager.PreProcessMedia(data, accountID, nil)
  // fetch the attachment id from the processing media
  attachmentID := processingMedia.AttachmentID()
@@ -863,8 +983,7 @@ func (suite *ManagerTestSuite) TestPngAlphaChannelProcessBlocking() {
  accountID := "01FS1X72SK9ZPW0J1QQ68BD264"

  // process the media with no additional info provided
- processingMedia, err := suite.manager.ProcessMedia(ctx, data, accountID, nil)
- suite.NoError(err)
+ processingMedia := suite.manager.PreProcessMedia(data, accountID, nil)
  // fetch the attachment id from the processing media
  attachmentID := processingMedia.AttachmentID()
@@ -935,8 +1054,7 @@ func (suite *ManagerTestSuite) TestSimpleJpegProcessBlockingWithCallback() {
  accountID := "01FS1X72SK9ZPW0J1QQ68BD264"

  // process the media with no additional info provided
- processingMedia, err := suite.manager.ProcessMedia(ctx, data, accountID, nil)
- suite.NoError(err)
+ processingMedia := suite.manager.PreProcessMedia(data, accountID, nil)
  // fetch the attachment id from the processing media
  attachmentID := processingMedia.AttachmentID()
@@ -992,166 +1110,6 @@ func (suite *ManagerTestSuite) TestSimpleJpegProcessBlockingWithCallback() {
suite.Equal(processedThumbnailBytesExpected, processedThumbnailBytes)
}

func (suite *ManagerTestSuite) TestSimpleJpegProcessAsync() {
ctx, cncl := context.WithTimeout(context.Background(), time.Second*30)
defer cncl()

data := func(_ context.Context) (io.ReadCloser, int64, error) {
// load bytes from a test image
b, err := os.ReadFile("./test/test-jpeg.jpg")
if err != nil {
panic(err)
}
return io.NopCloser(bytes.NewBuffer(b)), int64(len(b)), nil
}

accountID := "01FS1X72SK9ZPW0J1QQ68BD264"

// process the media with no additional info provided
processingMedia, err := suite.manager.ProcessMedia(ctx, data, accountID, nil)
suite.NoError(err)

// fetch the attachment id from the processing media
attachmentID := processingMedia.AttachmentID()

// wait for processing to complete
var attachment *gtsmodel.MediaAttachment
if !testrig.WaitFor(func() bool {
attachment, err = suite.db.GetAttachmentByID(ctx, attachmentID)
return err == nil && attachment != nil
}) {
suite.FailNow("timed out waiting for attachment to process")
}

// make sure it's got the stuff set on it that we expect
// the attachment ID and accountID we expect
suite.Equal(attachmentID, attachment.ID)
suite.Equal(accountID, attachment.AccountID)

// file meta should be correctly derived from the image
suite.EqualValues(gtsmodel.Original{
Width: 1920, Height: 1080, Size: 2073600, Aspect: 1.7777777777777777,
}, attachment.FileMeta.Original)
suite.EqualValues(gtsmodel.Small{
Width: 512, Height: 288, Size: 147456, Aspect: 1.7777777777777777,
}, attachment.FileMeta.Small)
suite.Equal("image/jpeg", attachment.File.ContentType)
suite.Equal("image/jpeg", attachment.Thumbnail.ContentType)
suite.Equal(269739, attachment.File.FileSize)
suite.Equal("LiBzRk#6V[WF_NvzV@WY_3rqV@a$", attachment.Blurhash)

// now make sure the attachment is in the database
dbAttachment, err := suite.db.GetAttachmentByID(ctx, attachmentID)
suite.NoError(err)
suite.NotNil(dbAttachment)

// make sure the processed file is in storage
processedFullBytes, err := suite.storage.Get(ctx, attachment.File.Path)
suite.NoError(err)
suite.NotEmpty(processedFullBytes)

// load the processed bytes from our test folder, to compare
processedFullBytesExpected, err := os.ReadFile("./test/test-jpeg-processed.jpg")
suite.NoError(err)
suite.NotEmpty(processedFullBytesExpected)

// the bytes in storage should be what we expected
suite.Equal(processedFullBytesExpected, processedFullBytes)

// now do the same for the thumbnail and make sure it's what we expected
processedThumbnailBytes, err := suite.storage.Get(ctx, attachment.Thumbnail.Path)
suite.NoError(err)
suite.NotEmpty(processedThumbnailBytes)

processedThumbnailBytesExpected, err := os.ReadFile("./test/test-jpeg-thumbnail.jpg")
suite.NoError(err)
suite.NotEmpty(processedThumbnailBytesExpected)

suite.Equal(processedThumbnailBytesExpected, processedThumbnailBytes)
}

func (suite *ManagerTestSuite) TestSimpleJpegQueueSpamming() {
// in this test, we spam the manager queue with 50 new media requests, just to see how it holds up
ctx := context.Background()

b, err := os.ReadFile("./test/test-jpeg.jpg")
if err != nil {
panic(err)
}

data := func(_ context.Context) (io.ReadCloser, int64, error) {
// load bytes from a test image
return io.NopCloser(bytes.NewReader(b)), int64(len(b)), nil
}

accountID := "01FS1X72SK9ZPW0J1QQ68BD264"

spam := 50
inProcess := []*media.ProcessingMedia{}
for i := 0; i < spam; i++ {
// process the media with no additional info provided
processingMedia, err := suite.manager.ProcessMedia(ctx, data, accountID, nil)
suite.NoError(err)
inProcess = append(inProcess, processingMedia)
}

for _, processingMedia := range inProcess {
// fetch the attachment id from the processing media
attachmentID := processingMedia.AttachmentID()

// do a blocking call to fetch the attachment
attachment, err := processingMedia.LoadAttachment(ctx)
suite.NoError(err)
suite.NotNil(attachment)

// make sure it's got the stuff set on it that we expect
// the attachment ID and accountID we expect
suite.Equal(attachmentID, attachment.ID)
suite.Equal(accountID, attachment.AccountID)

// file meta should be correctly derived from the image
suite.EqualValues(gtsmodel.Original{
Width: 1920, Height: 1080, Size: 2073600, Aspect: 1.7777777777777777,
}, attachment.FileMeta.Original)
suite.EqualValues(gtsmodel.Small{
Width: 512, Height: 288, Size: 147456, Aspect: 1.7777777777777777,
}, attachment.FileMeta.Small)
suite.Equal("image/jpeg", attachment.File.ContentType)
suite.Equal("image/jpeg", attachment.Thumbnail.ContentType)
suite.Equal(269739, attachment.File.FileSize)
suite.Equal("LiBzRk#6V[WF_NvzV@WY_3rqV@a$", attachment.Blurhash)

// now make sure the attachment is in the database
dbAttachment, err := suite.db.GetAttachmentByID(ctx, attachmentID)
suite.NoError(err)
suite.NotNil(dbAttachment)

// make sure the processed file is in storage
processedFullBytes, err := suite.storage.Get(ctx, attachment.File.Path)
suite.NoError(err)
suite.NotEmpty(processedFullBytes)

// load the processed bytes from our test folder, to compare
processedFullBytesExpected, err := os.ReadFile("./test/test-jpeg-processed.jpg")
suite.NoError(err)
suite.NotEmpty(processedFullBytesExpected)

// the bytes in storage should be what we expected
suite.Equal(processedFullBytesExpected, processedFullBytes)

// now do the same for the thumbnail and make sure it's what we expected
processedThumbnailBytes, err := suite.storage.Get(ctx, attachment.Thumbnail.Path)
suite.NoError(err)
suite.NotEmpty(processedThumbnailBytes)

processedThumbnailBytesExpected, err := os.ReadFile("./test/test-jpeg-thumbnail.jpg")
suite.NoError(err)
suite.NotEmpty(processedThumbnailBytesExpected)

suite.Equal(processedThumbnailBytesExpected, processedThumbnailBytes)
}
}

func (suite *ManagerTestSuite) TestSimpleJpegProcessBlockingWithDiskStorage() {
ctx := context.Background()
@ -1191,8 +1149,7 @@ func (suite *ManagerTestSuite) TestSimpleJpegProcessBlockingWithDiskStorage() {
|
|||
suite.manager = diskManager
|
||||
|
||||
// process the media with no additional info provided
|
||||
processingMedia, err := diskManager.ProcessMedia(ctx, data, accountID, nil)
|
||||
suite.NoError(err)
|
||||
processingMedia := diskManager.PreProcessMedia(data, accountID, nil)
|
||||
// fetch the attachment id from the processing media
|
||||
attachmentID := processingMedia.AttachmentID()
|
||||
|
||||
|
@ -1290,19 +1247,17 @@ func (suite *ManagerTestSuite) TestSmallSizedMediaTypeDetection_issue2263() {
|
|||
accountID := "01FS1X72SK9ZPW0J1QQ68BD264"
|
||||
|
||||
// process the media with no additional info provided
|
||||
processingMedia, err := suite.manager.ProcessMedia(ctx, data, accountID, nil)
|
||||
suite.NoError(err)
|
||||
processingMedia := suite.manager.PreProcessMedia(data, accountID, nil)
|
||||
if _, err := processingMedia.LoadAttachment(ctx); err != nil {
|
||||
suite.FailNow(err.Error())
|
||||
}
|
||||
|
||||
// fetch the attachment id from the processing media
|
||||
attachmentID := processingMedia.AttachmentID()
|
||||
|
||||
// wait for processing to complete
|
||||
var attachment *gtsmodel.MediaAttachment
|
||||
if !testrig.WaitFor(func() bool {
|
||||
attachment, err = suite.db.GetAttachmentByID(ctx, attachmentID)
|
||||
return err == nil && attachment != nil
|
||||
}) {
|
||||
suite.FailNow("timed out waiting for attachment to process")
|
||||
// fetch the attachment id from the processing media
|
||||
attachment, err := suite.db.GetAttachmentByID(ctx, attachmentID)
|
||||
if err != nil {
|
||||
suite.FailNow(err.Error())
|
||||
}
|
||||
|
||||
// make sure it's got the stuff set on it that we expect
|
||||
|
@ -1318,6 +1273,62 @@ func (suite *ManagerTestSuite) TestSmallSizedMediaTypeDetection_issue2263() {
|
|||
}
|
||||
}
|
||||
|
||||
func (suite *ManagerTestSuite) TestMisreportedSmallMedia() {
|
||||
const accountID = "01FS1X72SK9ZPW0J1QQ68BD264"
|
||||
var actualSize int
|
||||
|
||||
data := func(_ context.Context) (io.ReadCloser, int64, error) {
|
||||
// Load bytes from small png.
|
||||
b, err := os.ReadFile("./test/test-png-alphachannel-1x1px.png")
|
||||
if err != nil {
|
||||
suite.FailNow(err.Error())
|
||||
}
|
||||
|
||||
actualSize = len(b)
|
||||
|
||||
// Report media as twice its actual size. This should be corrected.
|
||||
return io.NopCloser(bytes.NewBuffer(b)), int64(2 * actualSize), nil
|
||||
}
|
||||
|
||||
// Process the media with no additional info provided.
|
||||
attachment, err := suite.manager.
|
||||
PreProcessMedia(data, accountID, nil).
|
||||
LoadAttachment(context.Background())
|
||||
if err != nil {
|
||||
suite.FailNow(err.Error())
|
||||
}
|
||||
|
||||
suite.Equal(actualSize, attachment.File.FileSize)
|
||||
}
|
||||
|
||||
func (suite *ManagerTestSuite) TestNoReportedSizeSmallMedia() {
|
||||
const accountID = "01FS1X72SK9ZPW0J1QQ68BD264"
|
||||
var actualSize int
|
||||
|
||||
data := func(_ context.Context) (io.ReadCloser, int64, error) {
|
||||
// Load bytes from small png.
|
||||
b, err := os.ReadFile("./test/test-png-alphachannel-1x1px.png")
|
||||
if err != nil {
|
||||
suite.FailNow(err.Error())
|
||||
}
|
||||
|
||||
actualSize = len(b)
|
||||
|
||||
// Return zero for media size. This should be detected.
|
||||
return io.NopCloser(bytes.NewBuffer(b)), 0, nil
|
||||
}
|
||||
|
||||
// Process the media with no additional info provided.
|
||||
attachment, err := suite.manager.
|
||||
PreProcessMedia(data, accountID, nil).
|
||||
LoadAttachment(context.Background())
|
||||
if err != nil {
|
||||
suite.FailNow(err.Error())
|
||||
}
|
||||
|
||||
suite.Equal(actualSize, attachment.File.FileSize)
|
||||
}
|
||||
|
||||
func TestManagerTestSuite(t *testing.T) {
|
||||
suite.Run(t, &ManagerTestSuite{})
|
||||
}
|
||||
|
|
|
@ -20,7 +20,6 @@ package media
|
|||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"fmt"
|
||||
"io"
|
||||
|
||||
"codeberg.org/gruf/go-bytesize"
|
||||
|
@ -33,6 +32,7 @@ import (
|
|||
"github.com/superseriousbusiness/gotosocial/internal/log"
|
||||
"github.com/superseriousbusiness/gotosocial/internal/regexes"
|
||||
"github.com/superseriousbusiness/gotosocial/internal/uris"
|
||||
"github.com/superseriousbusiness/gotosocial/internal/util"
|
||||
)
|
||||
|
||||
// ProcessingEmoji represents an emoji currently processing. It exposes
|
||||
|
@ -156,14 +156,51 @@ func (p *ProcessingEmoji) store(ctx context.Context) error {
|
|||
}
|
||||
}()
|
||||
|
||||
// Byte buffer to read file header into.
|
||||
// See: https://en.wikipedia.org/wiki/File_format#File_header
|
||||
// and https://github.com/h2non/filetype
|
||||
hdrBuf := make([]byte, 261)
|
||||
var maxSize bytesize.Size
|
||||
|
||||
// Read the first 261 header bytes into buffer.
|
||||
if _, err := io.ReadFull(rc, hdrBuf); err != nil {
|
||||
return gtserror.Newf("error reading incoming media: %w", err)
|
||||
if p.emoji.Domain == "" {
|
||||
// this is a local emoji upload
|
||||
maxSize = config.GetMediaEmojiLocalMaxSize()
|
||||
} else {
|
||||
// this is a remote incoming emoji
|
||||
maxSize = config.GetMediaEmojiRemoteMaxSize()
|
||||
}
|
||||
|
||||
// Check that provided size isn't beyond max. We check beforehand
|
||||
// so that we don't attempt to stream the emoji into storage if not needed.
|
||||
if size := bytesize.Size(sz); sz > 0 && size > maxSize {
|
||||
return gtserror.Newf("given emoji size %s greater than max allowed %s", size, maxSize)
|
||||
}
|
||||
|
||||
// Prepare to read bytes from
|
||||
// file header or magic number.
|
||||
fileSize := int(sz)
|
||||
hdrBuf := newHdrBuf(fileSize)
|
||||
|
||||
// Read into buffer as much as possible.
|
||||
//
|
||||
// UnexpectedEOF means we couldn't read up to the
|
||||
// given size, but we may still have read something.
|
||||
//
|
||||
// EOF means we couldn't read anything at all.
|
||||
//
|
||||
// Any other error likely means the connection messed up.
|
||||
//
|
||||
// In other words, rather counterintuitively, we
|
||||
// can only proceed on no error or unexpected error!
|
||||
n, err := io.ReadFull(rc, hdrBuf)
|
||||
if err != nil {
|
||||
if err != io.ErrUnexpectedEOF {
|
||||
return gtserror.Newf("error reading first bytes of incoming media: %w", err)
|
||||
}
|
||||
|
||||
// Initial file size was misreported, so we didn't read
|
||||
// fully into hdrBuf. Reslice it to the size we did read.
|
||||
log.Warnf(ctx,
|
||||
"recovered from misreported file size; reported %d; read %d",
|
||||
fileSize, n,
|
||||
)
|
||||
hdrBuf = hdrBuf[:n]
|
||||
}
|
||||
|
||||
// Parse file type info from header buffer.
|
||||
|
@ -184,24 +221,7 @@ func (p *ProcessingEmoji) store(ctx context.Context) error {
|
|||
// Recombine header bytes with remaining stream
|
||||
r := io.MultiReader(bytes.NewReader(hdrBuf), rc)
|
||||
|
||||
var maxSize bytesize.Size
|
||||
|
||||
if p.emoji.Domain == "" {
|
||||
// this is a local emoji upload
|
||||
maxSize = config.GetMediaEmojiLocalMaxSize()
|
||||
} else {
|
||||
// this is a remote incoming emoji
|
||||
maxSize = config.GetMediaEmojiRemoteMaxSize()
|
||||
}
|
||||
|
||||
// Check that provided size isn't beyond max. We check beforehand
|
||||
// so that we don't attempt to stream the emoji into storage if not needed.
|
||||
if size := bytesize.Size(sz); sz > 0 && size > maxSize {
|
||||
return gtserror.Newf("given emoji size %s greater than max allowed %s", size, maxSize)
|
||||
}
|
||||
|
||||
var pathID string
|
||||
|
||||
if p.newPathID != "" {
|
||||
// This is a refreshed emoji with a new
|
||||
// path ID that this will be stored under.
|
||||
|
@ -215,11 +235,10 @@ func (p *ProcessingEmoji) store(ctx context.Context) error {
|
|||
instanceAccID := regexes.FilePath.FindStringSubmatch(p.emoji.ImageStaticPath)[1]
|
||||
|
||||
// Calculate emoji file path.
|
||||
p.emoji.ImagePath = fmt.Sprintf(
|
||||
"%s/%s/%s/%s.%s",
|
||||
p.emoji.ImagePath = uris.StoragePathForAttachment(
|
||||
instanceAccID,
|
||||
TypeEmoji,
|
||||
SizeOriginal,
|
||||
string(TypeEmoji),
|
||||
string(SizeOriginal),
|
||||
pathID,
|
||||
info.Extension,
|
||||
)
|
||||
|
@ -235,14 +254,13 @@ func (p *ProcessingEmoji) store(ctx context.Context) error {
|
|||
}
|
||||
|
||||
// Write the final image reader stream to our storage.
|
||||
sz, err = p.mgr.state.Storage.PutStream(ctx, p.emoji.ImagePath, r)
|
||||
wroteSize, err := p.mgr.state.Storage.PutStream(ctx, p.emoji.ImagePath, r)
|
||||
if err != nil {
|
||||
return gtserror.Newf("error writing emoji to storage: %w", err)
|
||||
}
|
||||
|
||||
// Once again check size in case none was provided previously.
|
||||
if size := bytesize.Size(sz); size > maxSize {
|
||||
|
||||
if size := bytesize.Size(wroteSize); size > maxSize {
|
||||
if err := p.mgr.state.Storage.Delete(ctx, p.emoji.ImagePath); err != nil {
|
||||
log.Errorf(ctx, "error removing too-large-emoji from storage: %v", err)
|
||||
}
|
||||
|
@ -251,7 +269,7 @@ func (p *ProcessingEmoji) store(ctx context.Context) error {
|
|||
}
|
||||
|
||||
// Fill in remaining attachment data now it's stored.
|
||||
p.emoji.ImageURL = uris.GenerateURIForAttachment(
|
||||
p.emoji.ImageURL = uris.URIForAttachment(
|
||||
instanceAccID,
|
||||
string(TypeEmoji),
|
||||
string(SizeOriginal),
|
||||
|
@ -259,11 +277,8 @@ func (p *ProcessingEmoji) store(ctx context.Context) error {
|
|||
info.Extension,
|
||||
)
|
||||
p.emoji.ImageContentType = info.MIME.Value
|
||||
p.emoji.ImageFileSize = int(sz)
|
||||
p.emoji.Cached = func() *bool {
|
||||
ok := true
|
||||
return &ok
|
||||
}()
|
||||
p.emoji.ImageFileSize = int(wroteSize)
|
||||
p.emoji.Cached = util.Ptr(true)
|
||||
|
||||
return nil
|
||||
}
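The `util.Ptr(true)` call above replaces the old inline-closure idiom for taking the address of a literal. A minimal sketch of such a generic helper, assuming util.Ptr follows the common pointer-helper pattern (illustration only, not copied from the repository):

// Ptr returns a pointer to any value, avoiding the old
// "declare a variable just to take its address" closure.
// Assumed shape of the internal/util helper, for illustration.
func Ptr[T any](v T) *T {
	return &v
}

// Usage, mirroring the diff above:
//   p.emoji.Cached = util.Ptr(true)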
|
||||
|
|
|
@ -20,12 +20,12 @@ package media
|
|||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"fmt"
|
||||
"errors"
|
||||
"image/jpeg"
|
||||
"io"
|
||||
"time"
|
||||
|
||||
"codeberg.org/gruf/go-errors/v2"
|
||||
errorsv2 "codeberg.org/gruf/go-errors/v2"
|
||||
"codeberg.org/gruf/go-runners"
|
||||
"github.com/disintegration/imaging"
|
||||
"github.com/h2non/filetype"
|
||||
|
@ -33,11 +33,14 @@ import (
|
|||
"github.com/superseriousbusiness/gotosocial/internal/gtserror"
|
||||
"github.com/superseriousbusiness/gotosocial/internal/gtsmodel"
|
||||
"github.com/superseriousbusiness/gotosocial/internal/log"
|
||||
"github.com/superseriousbusiness/gotosocial/internal/storage"
|
||||
"github.com/superseriousbusiness/gotosocial/internal/uris"
|
||||
"github.com/superseriousbusiness/gotosocial/internal/util"
|
||||
)
|
||||
|
||||
// ProcessingMedia represents a piece of media that is currently being processed. It exposes
|
||||
// various functions for retrieving data from the process.
|
||||
// ProcessingMedia represents a piece of media
|
||||
// currently being processed. It exposes functions
|
||||
// for retrieving data from the process.
|
||||
type ProcessingMedia struct {
|
||||
media *gtsmodel.MediaAttachment // processing media attachment details
|
||||
dataFn DataFunc // load-data function, returns media stream
|
||||
|
@ -48,39 +51,56 @@ type ProcessingMedia struct {
|
|||
mgr *Manager // mgr instance (access to db / storage)
|
||||
}
|
||||
|
||||
// AttachmentID returns the ID of the underlying media attachment without blocking processing.
|
||||
// AttachmentID returns the ID of the underlying
|
||||
// media attachment without blocking processing.
|
||||
func (p *ProcessingMedia) AttachmentID() string {
|
||||
return p.media.ID // immutable, safe outside mutex.
|
||||
}
|
||||
|
||||
// LoadAttachment blocks until the thumbnail and fullsize content has been processed, and then returns the completed attachment.
|
||||
// LoadAttachment blocks until the thumbnail and
|
||||
// fullsize content has been processed, and then
|
||||
// returns the attachment.
|
||||
//
|
||||
// If processing could not be completed fully
|
||||
// then an error will be returned. The attachment
|
||||
// will still be returned in that case, but it will
|
||||
// only be partially complete and should be treated
|
||||
// as a placeholder.
|
||||
func (p *ProcessingMedia) LoadAttachment(ctx context.Context) (*gtsmodel.MediaAttachment, error) {
|
||||
// Attempt to load synchronously.
|
||||
media, done, err := p.load(ctx)
|
||||
|
||||
if err == nil {
|
||||
// No issue, return media.
|
||||
return media, nil
|
||||
}
|
||||
|
||||
if !done {
|
||||
// Provided context was cancelled, e.g. request cancelled
|
||||
// early. Queue this item for asynchronous processing.
|
||||
// Provided context was cancelled,
|
||||
// e.g. request aborted early before
|
||||
// its context could be used to finish
|
||||
// loading the attachment. Enqueue for
|
||||
// asynchronous processing, which will
|
||||
// use a background context.
|
||||
log.Warnf(ctx, "reprocessing media %s after canceled ctx", p.media.ID)
|
||||
go p.mgr.state.Workers.Media.Enqueue(p.Process)
|
||||
}
|
||||
|
||||
return nil, err
|
||||
// Media could not be retrieved FULLY,
|
||||
// but partial attachment should be present.
|
||||
return media, err
|
||||
}
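Because LoadAttachment now returns the (possibly partial) attachment alongside any error, callers can keep the placeholder rather than discarding it. A minimal usage sketch, assuming a `processing *ProcessingMedia` and a `ctx` are already in hand (the variable names are illustrative):

// Sketch only: processing and ctx are assumed to exist in the caller.
attachment, err := processing.LoadAttachment(ctx)
if err != nil {
	// attachment may still be non-nil here: a partially
	// complete placeholder rather than fully processed media.
	log.Warnf(ctx, "media %s only partially processed: %v", processing.AttachmentID(), err)
}
if attachment != nil {
	// Safe to use the placeholder attachment even when err != nil.
	_ = attachment.ID
}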
|
||||
|
||||
// Process allows the receiving object to fit the runners.WorkerFunc signature. It performs a (blocking) load and logs on error.
|
||||
// Process allows the receiving object to fit the
|
||||
// runners.WorkerFunc signature. It performs a
|
||||
// (blocking) load and logs on error.
|
||||
func (p *ProcessingMedia) Process(ctx context.Context) {
|
||||
if _, _, err := p.load(ctx); err != nil {
|
||||
log.Errorf(ctx, "error processing media: %v", err)
|
||||
log.Errorf(ctx, "error(s) processing media: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
// load performs a concurrency-safe load of ProcessingMedia, only marking itself as complete when returned error is NOT a context cancel.
|
||||
// load performs a concurrency-safe load of ProcessingMedia, only
|
||||
// marking itself as complete when returned error is NOT a context cancel.
|
||||
func (p *ProcessingMedia) load(ctx context.Context) (*gtsmodel.MediaAttachment, bool, error) {
|
||||
var (
|
||||
done bool
|
||||
|
@ -95,7 +115,7 @@ func (p *ProcessingMedia) load(ctx context.Context) (*gtsmodel.MediaAttachment,
|
|||
|
||||
defer func() {
|
||||
// This is only done when ctx NOT cancelled.
|
||||
done = err == nil || !errors.Comparable(err,
|
||||
done = err == nil || !errorsv2.Comparable(err,
|
||||
context.Canceled,
|
||||
context.DeadlineExceeded,
|
||||
)
|
||||
|
@ -109,34 +129,61 @@ func (p *ProcessingMedia) load(ctx context.Context) (*gtsmodel.MediaAttachment,
|
|||
p.err = err
|
||||
}()
|
||||
|
||||
// Gather errors as we proceed.
|
||||
var errs = gtserror.NewMultiError(4)
|
||||
|
||||
// Attempt to store media and calculate
|
||||
// full-size media attachment details.
|
||||
if err = p.store(ctx); err != nil {
|
||||
return err
|
||||
//
|
||||
// This will update p.media as it goes.
|
||||
storeErr := p.store(ctx)
|
||||
if storeErr != nil {
|
||||
errs.Append(storeErr)
|
||||
}
|
||||
|
||||
// Finish processing by reloading media into
|
||||
// memory to get dimension and generate a thumb.
|
||||
if err = p.finish(ctx); err != nil {
|
||||
return err
|
||||
//
|
||||
// This will update p.media as it goes.
|
||||
if finishErr := p.finish(ctx); finishErr != nil {
|
||||
errs.Append(finishErr)
|
||||
}
|
||||
|
||||
if p.recache {
|
||||
// Existing attachment we're recaching, so only update.
|
||||
err = p.mgr.state.DB.UpdateAttachment(ctx, p.media)
|
||||
return err
|
||||
// If this isn't a file we were able to process,
|
||||
// we may have partially stored it (e.g., it's a
|
||||
// jpeg, which is fine, but streaming it to storage
|
||||
// was interrupted halfway through and so it was
|
||||
// never decoded). Try to clean up in this case.
|
||||
if p.media.Type == gtsmodel.FileTypeUnknown {
|
||||
deleteErr := p.mgr.state.Storage.Delete(ctx, p.media.File.Path)
|
||||
if deleteErr != nil && !errors.Is(deleteErr, storage.ErrNotFound) {
|
||||
errs.Append(deleteErr)
|
||||
}
|
||||
}
|
||||
|
||||
// First time caching this attachment, insert it.
|
||||
err = p.mgr.state.DB.PutAttachment(ctx, p.media)
|
||||
var dbErr error
|
||||
switch {
|
||||
case !p.recache:
|
||||
// First time caching this attachment, insert it.
|
||||
dbErr = p.mgr.state.DB.PutAttachment(ctx, p.media)
|
||||
|
||||
case p.recache && len(errs) == 0:
|
||||
// Existing attachment we're recaching, update it.
|
||||
//
|
||||
// (We only want to update if everything went OK so far,
|
||||
// otherwise we'd better leave previous version alone.)
|
||||
dbErr = p.mgr.state.DB.UpdateAttachment(ctx, p.media)
|
||||
}
|
||||
|
||||
if dbErr != nil {
|
||||
errs.Append(dbErr)
|
||||
}
|
||||
|
||||
err = errs.Combine()
|
||||
return err
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
return nil, done, err
|
||||
}
|
||||
|
||||
return p.media, done, nil
|
||||
return p.media, done, err
|
||||
}
|
||||
|
||||
// store calls the data function attached to p if it hasn't been called yet,
|
||||
|
@ -156,17 +203,47 @@ func (p *ProcessingMedia) store(ctx context.Context) error {
|
|||
}
|
||||
}()
|
||||
|
||||
// Byte buffer to read file header into.
|
||||
// See: https://en.wikipedia.org/wiki/File_format#File_header
|
||||
// and https://github.com/h2non/filetype
|
||||
hdrBuf := make([]byte, 261)
|
||||
// Assume we're given correct file
|
||||
// size, we can overwrite this later
|
||||
// once we know THE TRUTH.
|
||||
fileSize := int(sz)
|
||||
p.media.File.FileSize = fileSize
|
||||
|
||||
// Read the first 261 header bytes into buffer as much as possible.
|
||||
if _, err := rc.Read(hdrBuf); err != nil {
|
||||
return gtserror.Newf("error reading incoming media: %w", err)
|
||||
// Prepare to read bytes from
|
||||
// file header or magic number.
|
||||
hdrBuf := newHdrBuf(fileSize)
|
||||
|
||||
// Read into buffer as much as possible.
|
||||
//
|
||||
// UnexpectedEOF means we couldn't read up to the
|
||||
// given size, but we may still have read something.
|
||||
//
|
||||
// EOF means we couldn't read anything at all.
|
||||
//
|
||||
// Any other error likely means the connection messed up.
|
||||
//
|
||||
// In other words, rather counterintuitively, we
|
||||
// can only proceed on no error or unexpected error!
|
||||
n, err := io.ReadFull(rc, hdrBuf)
|
||||
if err != nil {
|
||||
if err != io.ErrUnexpectedEOF {
|
||||
return gtserror.Newf("error reading first bytes of incoming media: %w", err)
|
||||
}
|
||||
|
||||
// Initial file size was misreported, so we didn't read
|
||||
// fully into hdrBuf. Reslice it to the size we did read.
|
||||
log.Warnf(ctx,
|
||||
"recovered from misreported file size; reported %d; read %d",
|
||||
fileSize, n,
|
||||
)
|
||||
hdrBuf = hdrBuf[:n]
|
||||
fileSize = n
|
||||
p.media.File.FileSize = fileSize
|
||||
}
|
||||
|
||||
// Parse file type info from header buffer.
|
||||
// This should only ever error if the buffer
|
||||
// is empty (i.e., the attachment is 0 bytes).
|
||||
info, err := filetype.Match(hdrBuf)
|
||||
if err != nil {
|
||||
return gtserror.Newf("error parsing file type: %w", err)
|
||||
|
@ -175,38 +252,77 @@ func (p *ProcessingMedia) store(ctx context.Context) error {
|
|||
// Recombine header bytes with remaining stream
|
||||
r := io.MultiReader(bytes.NewReader(hdrBuf), rc)
|
||||
|
||||
// Assume we'll put
|
||||
// this file in storage.
|
||||
store := true
|
||||
|
||||
switch info.Extension {
|
||||
case "mp4":
|
||||
p.media.Type = gtsmodel.FileTypeVideo
|
||||
// No problem.
|
||||
|
||||
case "gif":
|
||||
p.media.Type = gtsmodel.FileTypeImage
|
||||
// No problem
|
||||
|
||||
case "jpg", "jpeg", "png", "webp":
|
||||
p.media.Type = gtsmodel.FileTypeImage
|
||||
if sz > 0 {
|
||||
// A file size was provided so we can clean exif data from image.
|
||||
r, err = terminator.Terminate(r, int(sz), info.Extension)
|
||||
if fileSize > 0 {
|
||||
// A file size was provided so we can clean
|
||||
// exif data from image as we're streaming it.
|
||||
r, err = terminator.Terminate(r, fileSize, info.Extension)
|
||||
if err != nil {
|
||||
return gtserror.Newf("error cleaning exif data: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
default:
|
||||
return gtserror.Newf("unsupported file type: %s", info.Extension)
|
||||
// The file is not a supported format that
|
||||
// we can process, so we can't do much with it.
|
||||
log.Warnf(ctx,
|
||||
"media extension '%s' not officially supported, will be processed as "+
|
||||
"type '%s' with minimal metadata, and will not be cached locally",
|
||||
info.Extension, gtsmodel.FileTypeUnknown,
|
||||
)
|
||||
|
||||
// Don't bother storing this.
|
||||
store = false
|
||||
}
|
||||
|
||||
// Calculate attachment file path.
|
||||
p.media.File.Path = fmt.Sprintf(
|
||||
"%s/%s/%s/%s.%s",
|
||||
// Fill in correct attachment
|
||||
// data now we've parsed it.
|
||||
p.media.URL = uris.URIForAttachment(
|
||||
p.media.AccountID,
|
||||
TypeAttachment,
|
||||
SizeOriginal,
|
||||
string(TypeAttachment),
|
||||
string(SizeOriginal),
|
||||
p.media.ID,
|
||||
info.Extension,
|
||||
)
|
||||
|
||||
// This shouldn't already exist, but we do a check as it's worth logging.
|
||||
// Prefer discovered mime type, fall back to
|
||||
// generic "this contains some bytes" type.
|
||||
mime := info.MIME.Value
|
||||
if mime == "" {
|
||||
mime = "application/octet-stream"
|
||||
}
|
||||
p.media.File.ContentType = mime
|
||||
|
||||
// Calculate attachment file path.
|
||||
p.media.File.Path = uris.StoragePathForAttachment(
|
||||
p.media.AccountID,
|
||||
string(TypeAttachment),
|
||||
string(SizeOriginal),
|
||||
p.media.ID,
|
||||
info.Extension,
|
||||
)
|
||||
|
||||
// We should only try to store the file if it's
|
||||
// a format we can keep processing, otherwise be
|
||||
// a bit cheeky: don't store it and let users
|
||||
// click through to the remote server instead.
|
||||
if !store {
|
||||
return nil
|
||||
}
|
||||
|
||||
// File shouldn't already exist in storage at this point,
|
||||
// but we do a check as it's worth logging / cleaning up.
|
||||
if have, _ := p.mgr.state.Storage.Has(ctx, p.media.File.Path); have {
|
||||
log.Warnf(ctx, "media already exists at storage path: %s", p.media.File.Path)
|
||||
|
||||
|
@ -216,59 +332,99 @@ func (p *ProcessingMedia) store(ctx context.Context) error {
|
|||
}
|
||||
}
|
||||
|
||||
// Write the final image reader stream to our storage.
|
||||
sz, err = p.mgr.state.Storage.PutStream(ctx, p.media.File.Path, r)
|
||||
// Write the final reader stream to our storage.
|
||||
wroteSize, err := p.mgr.state.Storage.PutStream(ctx, p.media.File.Path, r)
|
||||
if err != nil {
|
||||
return gtserror.Newf("error writing media to storage: %w", err)
|
||||
}
|
||||
|
||||
// Set written image size.
|
||||
p.media.File.FileSize = int(sz)
|
||||
// Set actual written size
|
||||
// as authoritative file size.
|
||||
p.media.File.FileSize = int(wroteSize)
|
||||
|
||||
// Fill in remaining attachment data now it's stored.
|
||||
p.media.URL = uris.GenerateURIForAttachment(
|
||||
p.media.AccountID,
|
||||
string(TypeAttachment),
|
||||
string(SizeOriginal),
|
||||
p.media.ID,
|
||||
info.Extension,
|
||||
)
|
||||
p.media.File.ContentType = info.MIME.Value
|
||||
p.media.Cached = func() *bool {
|
||||
ok := true
|
||||
return &ok
|
||||
}()
|
||||
// We can now consider this cached.
|
||||
p.media.Cached = util.Ptr(true)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (p *ProcessingMedia) finish(ctx context.Context) error {
|
||||
// Fetch a stream to the original file in storage.
|
||||
// Make a jolly assumption about thumbnail type.
|
||||
p.media.Thumbnail.ContentType = mimeImageJpeg
|
||||
|
||||
// Calculate attachment thumbnail file path
|
||||
p.media.Thumbnail.Path = uris.StoragePathForAttachment(
|
||||
p.media.AccountID,
|
||||
string(TypeAttachment),
|
||||
string(SizeSmall),
|
||||
p.media.ID,
|
||||
// Always encode attachment
|
||||
// thumbnails as jpg.
|
||||
"jpg",
|
||||
)
|
||||
|
||||
// Calculate attachment thumbnail serve path.
|
||||
p.media.Thumbnail.URL = uris.URIForAttachment(
|
||||
p.media.AccountID,
|
||||
string(TypeAttachment),
|
||||
string(SizeSmall),
|
||||
p.media.ID,
|
||||
// Always encode attachment
|
||||
// thumbnails as jpg.
|
||||
"jpg",
|
||||
)
|
||||
|
||||
// If original file hasn't been stored, there's
|
||||
// likely something wrong with the data, or we
|
||||
// don't want to store it. Skip everything else.
|
||||
if !*p.media.Cached {
|
||||
p.media.Processing = gtsmodel.ProcessingStatusProcessed
|
||||
return nil
|
||||
}
|
||||
|
||||
// Get a stream to the original file for further processing.
|
||||
rc, err := p.mgr.state.Storage.GetStream(ctx, p.media.File.Path)
|
||||
if err != nil {
|
||||
return gtserror.Newf("error loading file from storage: %w", err)
|
||||
}
|
||||
defer rc.Close()
|
||||
|
||||
// fullImg is the processed version of
|
||||
// the original (stripped + reoriented).
|
||||
var fullImg *gtsImage
|
||||
|
||||
// Depending on the content type, we
|
||||
// can do various types of decoding.
|
||||
switch p.media.File.ContentType {
|
||||
|
||||
// .jpeg, .gif, .webp image type
|
||||
case mimeImageJpeg, mimeImageGif, mimeImageWebp:
|
||||
fullImg, err = decodeImage(rc, imaging.AutoOrientation(true))
|
||||
fullImg, err = decodeImage(
|
||||
rc,
|
||||
imaging.AutoOrientation(true),
|
||||
)
|
||||
if err != nil {
|
||||
return gtserror.Newf("error decoding image: %w", err)
|
||||
}
|
||||
|
||||
// Mark as no longer unknown type now
|
||||
// we know for sure we can decode it.
|
||||
p.media.Type = gtsmodel.FileTypeImage
|
||||
|
||||
// .png image (requires ancillary chunk stripping)
|
||||
case mimeImagePng:
|
||||
fullImg, err = decodeImage(&pngAncillaryChunkStripper{
|
||||
Reader: rc,
|
||||
}, imaging.AutoOrientation(true))
|
||||
fullImg, err = decodeImage(
|
||||
&pngAncillaryChunkStripper{Reader: rc},
|
||||
imaging.AutoOrientation(true),
|
||||
)
|
||||
if err != nil {
|
||||
return gtserror.Newf("error decoding image: %w", err)
|
||||
}
|
||||
|
||||
// Mark as no longer unknown type now
|
||||
// we know for sure we can decode it.
|
||||
p.media.Type = gtsmodel.FileTypeImage
|
||||
|
||||
// .mp4 video type
|
||||
case mimeVideoMp4:
|
||||
video, err := decodeVideoFrame(rc)
|
||||
|
@ -283,9 +439,14 @@ func (p *ProcessingMedia) finish(ctx context.Context) error {
|
|||
p.media.FileMeta.Original.Duration = &video.duration
|
||||
p.media.FileMeta.Original.Framerate = &video.framerate
|
||||
p.media.FileMeta.Original.Bitrate = &video.bitrate
|
||||
|
||||
// Mark as no longer unknown type now
|
||||
// we know for sure we can decode it.
|
||||
p.media.Type = gtsmodel.FileTypeVideo
|
||||
}
|
||||
|
||||
// The image should be in-memory by now.
|
||||
// fullImg should be in-memory by
|
||||
// now so we're done with storage.
|
||||
if err := rc.Close(); err != nil {
|
||||
return gtserror.Newf("error closing file: %w", err)
|
||||
}
|
||||
|
@ -296,15 +457,6 @@ func (p *ProcessingMedia) finish(ctx context.Context) error {
|
|||
p.media.FileMeta.Original.Size = int(fullImg.Size())
|
||||
p.media.FileMeta.Original.Aspect = fullImg.AspectRatio()
|
||||
|
||||
// Calculate attachment thumbnail file path
|
||||
p.media.Thumbnail.Path = fmt.Sprintf(
|
||||
"%s/%s/%s/%s.jpg",
|
||||
p.media.AccountID,
|
||||
TypeAttachment,
|
||||
SizeSmall,
|
||||
p.media.ID,
|
||||
)
|
||||
|
||||
// Get smaller thumbnail image
|
||||
thumbImg := fullImg.Thumbnail()
|
||||
|
||||
|
@ -312,16 +464,20 @@ func (p *ProcessingMedia) finish(ctx context.Context) error {
|
|||
// now take our large son.
|
||||
fullImg = nil
|
||||
|
||||
// Blurhash needs generating from thumb.
|
||||
hash, err := thumbImg.Blurhash()
|
||||
if err != nil {
|
||||
return gtserror.Newf("error generating blurhash: %w", err)
|
||||
// Only generate blurhash
|
||||
// from thumb if necessary.
|
||||
if p.media.Blurhash == "" {
|
||||
hash, err := thumbImg.Blurhash()
|
||||
if err != nil {
|
||||
return gtserror.Newf("error generating blurhash: %w", err)
|
||||
}
|
||||
|
||||
// Set the attachment blurhash.
|
||||
p.media.Blurhash = hash
|
||||
}
|
||||
|
||||
// Set the attachment blurhash.
|
||||
p.media.Blurhash = hash
|
||||
|
||||
// This shouldn't already exist, but we do a check as it's worth logging.
|
||||
// Thumbnail shouldn't already exist in storage at this point,
|
||||
// but we do a check as it's worth logging / cleaning up.
|
||||
if have, _ := p.mgr.state.Storage.Has(ctx, p.media.Thumbnail.Path); have {
|
||||
log.Warnf(ctx, "thumbnail already exists at storage path: %s", p.media.Thumbnail.Path)
|
||||
|
||||
|
@ -333,7 +489,9 @@ func (p *ProcessingMedia) finish(ctx context.Context) error {
|
|||
|
||||
// Create a thumbnail JPEG encoder stream.
|
||||
enc := thumbImg.ToJPEG(&jpeg.Options{
|
||||
Quality: 70, // enough for a thumbnail.
|
||||
// Good enough for
|
||||
// a thumbnail.
|
||||
Quality: 70,
|
||||
})
|
||||
|
||||
// Stream-encode the JPEG thumbnail image into storage.
|
||||
|
@ -342,16 +500,6 @@ func (p *ProcessingMedia) finish(ctx context.Context) error {
|
|||
return gtserror.Newf("error stream-encoding thumbnail to storage: %w", err)
|
||||
}
|
||||
|
||||
// Fill in remaining thumbnail now it's stored
|
||||
p.media.Thumbnail.ContentType = mimeImageJpeg
|
||||
p.media.Thumbnail.URL = uris.GenerateURIForAttachment(
|
||||
p.media.AccountID,
|
||||
string(TypeAttachment),
|
||||
string(SizeSmall),
|
||||
p.media.ID,
|
||||
"jpg", // always jpeg
|
||||
)
|
||||
|
||||
// Set thumbnail dimensions in attachment info.
|
||||
p.media.FileMeta.Small = gtsmodel.Small{
|
||||
Width: int(thumbImg.Width()),
|
||||
|
|
Binary file not shown.
|
@ -44,9 +44,6 @@ const (
|
|||
mimeVideoMp4 = mimeVideo + "/" + mimeMp4
|
||||
)
|
||||
|
||||
// EmojiMaxBytes is the maximum permitted bytes of an emoji upload (50kb)
|
||||
// const EmojiMaxBytes = 51200
|
||||
|
||||
type Size string
|
||||
|
||||
const (
|
||||
|
|
42
internal/media/util.go
Normal file
|
@ -0,0 +1,42 @@
|
|||
// GoToSocial
|
||||
// Copyright (C) GoToSocial Authors admin@gotosocial.org
|
||||
// SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU Affero General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU Affero General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Affero General Public License
|
||||
// along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
package media
|
||||
|
||||
// newHdrBuf returns a buffer of suitable size to
|
||||
// read bytes from a file header or magic number.
|
||||
//
|
||||
// File header is *USUALLY* 261 bytes at the start
|
||||
// of a file; magic number can be much less than
|
||||
// that (just a few bytes).
|
||||
//
|
||||
// To cover both cases, this function returns a buffer
|
||||
// suitable for whichever is smallest: the first 261
|
||||
// bytes of the file, or the whole file.
|
||||
//
|
||||
// See:
|
||||
//
|
||||
// - https://en.wikipedia.org/wiki/File_format#File_header
|
||||
// - https://github.com/h2non/filetype.
|
||||
func newHdrBuf(fileSize int) []byte {
|
||||
bufSize := 261
|
||||
if fileSize > 0 && fileSize < bufSize {
|
||||
bufSize = fileSize
|
||||
}
|
||||
|
||||
return make([]byte, bufSize)
|
||||
}
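A short usage sketch showing how newHdrBuf pairs with io.ReadFull in the store functions above, assuming a reader rc and a (possibly misreported) size sz (illustration only, mirroring the pattern in this commit):

// Sketch only: rc (io.Reader) and sz (reported size) are assumptions.
hdrBuf := newHdrBuf(int(sz))
n, err := io.ReadFull(rc, hdrBuf)
if err != nil {
	if err != io.ErrUnexpectedEOF {
		return gtserror.Newf("error reading first bytes of incoming media: %w", err)
	}
	// Fewer bytes were available than reported;
	// keep only what was actually read.
	hdrBuf = hdrBuf[:n]
}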
|
|
@ -33,6 +33,7 @@ import (
|
|||
"github.com/superseriousbusiness/gotosocial/internal/messages"
|
||||
"github.com/superseriousbusiness/gotosocial/internal/text"
|
||||
"github.com/superseriousbusiness/gotosocial/internal/typeutils"
|
||||
"github.com/superseriousbusiness/gotosocial/internal/util"
|
||||
"github.com/superseriousbusiness/gotosocial/internal/validate"
|
||||
)
|
||||
|
||||
|
@ -281,57 +282,76 @@ func (p *Processor) Update(ctx context.Context, account *gtsmodel.Account, form
|
|||
return acctSensitive, nil
|
||||
}
|
||||
|
||||
// UpdateAvatar does the dirty work of checking the avatar part of an account update form,
|
||||
// parsing and checking the image, and doing the necessary updates in the database for this to become
|
||||
// the account's new avatar image.
|
||||
func (p *Processor) UpdateAvatar(ctx context.Context, avatar *multipart.FileHeader, description *string, accountID string) (*gtsmodel.MediaAttachment, error) {
|
||||
// UpdateAvatar does the dirty work of checking the avatar
|
||||
// part of an account update form, parsing and checking the
|
||||
// media, and doing the necessary updates in the database
|
||||
// for this to become the account's new avatar.
|
||||
func (p *Processor) UpdateAvatar(
|
||||
ctx context.Context,
|
||||
avatar *multipart.FileHeader,
|
||||
description *string,
|
||||
accountID string,
|
||||
) (*gtsmodel.MediaAttachment, error) {
|
||||
maxImageSize := config.GetMediaImageMaxSize()
|
||||
if avatar.Size > int64(maxImageSize) {
|
||||
return nil, fmt.Errorf("UpdateAvatar: avatar with size %d exceeded max image size of %d bytes", avatar.Size, maxImageSize)
|
||||
return nil, gtserror.Newf("size %d exceeded max media size of %d bytes", avatar.Size, maxImageSize)
|
||||
}
|
||||
|
||||
dataFunc := func(innerCtx context.Context) (io.ReadCloser, int64, error) {
|
||||
data := func(innerCtx context.Context) (io.ReadCloser, int64, error) {
|
||||
f, err := avatar.Open()
|
||||
return f, avatar.Size, err
|
||||
}
|
||||
|
||||
isAvatar := true
|
||||
ai := &media.AdditionalMediaInfo{
|
||||
Avatar: &isAvatar,
|
||||
// Process the media attachment and load it immediately.
|
||||
media := p.mediaManager.PreProcessMedia(data, accountID, &media.AdditionalMediaInfo{
|
||||
Avatar: util.Ptr(true),
|
||||
Description: description,
|
||||
}
|
||||
})
|
||||
|
||||
processingMedia, err := p.mediaManager.PreProcessMedia(ctx, dataFunc, accountID, ai)
|
||||
attachment, err := media.LoadAttachment(ctx)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("UpdateAvatar: error processing avatar: %s", err)
|
||||
return nil, gtserror.NewErrorUnprocessableEntity(err, err.Error())
|
||||
} else if attachment.Type == gtsmodel.FileTypeUnknown {
|
||||
err = gtserror.Newf("could not process uploaded file with extension %s", attachment.File.ContentType)
|
||||
return nil, gtserror.NewErrorUnprocessableEntity(err, err.Error())
|
||||
}
|
||||
|
||||
return processingMedia.LoadAttachment(ctx)
|
||||
return attachment, nil
|
||||
}
|
||||
|
||||
// UpdateHeader does the dirty work of checking the header part of an account update form,
|
||||
// parsing and checking the image, and doing the necessary updates in the database for this to become
|
||||
// the account's new header image.
|
||||
func (p *Processor) UpdateHeader(ctx context.Context, header *multipart.FileHeader, description *string, accountID string) (*gtsmodel.MediaAttachment, error) {
|
||||
// UpdateHeader does the dirty work of checking the header
|
||||
// part of an account update form, parsing and checking the
|
||||
// media, and doing the necessary updates in the database
|
||||
// for this to become the account's new header.
|
||||
func (p *Processor) UpdateHeader(
|
||||
ctx context.Context,
|
||||
header *multipart.FileHeader,
|
||||
description *string,
|
||||
accountID string,
|
||||
) (*gtsmodel.MediaAttachment, error) {
|
||||
maxImageSize := config.GetMediaImageMaxSize()
|
||||
if header.Size > int64(maxImageSize) {
|
||||
return nil, fmt.Errorf("UpdateHeader: header with size %d exceeded max image size of %d bytes", header.Size, maxImageSize)
|
||||
return nil, gtserror.Newf("size %d exceeded max media size of %d bytes", header.Size, maxImageSize)
|
||||
}
|
||||
|
||||
dataFunc := func(innerCtx context.Context) (io.ReadCloser, int64, error) {
|
||||
data := func(innerCtx context.Context) (io.ReadCloser, int64, error) {
|
||||
f, err := header.Open()
|
||||
return f, header.Size, err
|
||||
}
|
||||
|
||||
isHeader := true
|
||||
ai := &media.AdditionalMediaInfo{
|
||||
Header: &isHeader,
|
||||
}
|
||||
// Process the media attachment and load it immediately.
|
||||
media := p.mediaManager.PreProcessMedia(data, accountID, &media.AdditionalMediaInfo{
|
||||
Header: util.Ptr(true),
|
||||
Description: description,
|
||||
})
|
||||
|
||||
processingMedia, err := p.mediaManager.PreProcessMedia(ctx, dataFunc, accountID, ai)
|
||||
attachment, err := media.LoadAttachment(ctx)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("UpdateHeader: error processing header: %s", err)
|
||||
return nil, gtserror.NewErrorUnprocessableEntity(err, err.Error())
|
||||
} else if attachment.Type == gtsmodel.FileTypeUnknown {
|
||||
err = gtserror.Newf("could not process uploaded file with extension %s", attachment.File.ContentType)
|
||||
return nil, gtserror.NewErrorUnprocessableEntity(err, err.Error())
|
||||
}
|
||||
|
||||
return processingMedia.LoadAttachment(ctx)
|
||||
return attachment, nil
|
||||
}
|
||||
|
|
|
@ -55,7 +55,7 @@ func (p *Processor) EmojiCreate(ctx context.Context, account *gtsmodel.Account,
|
|||
return nil, gtserror.NewErrorInternalError(fmt.Errorf("error creating id for new emoji: %s", err), "error creating emoji ID")
|
||||
}
|
||||
|
||||
emojiURI := uris.GenerateURIForEmoji(emojiID)
|
||||
emojiURI := uris.URIForEmoji(emojiID)
|
||||
|
||||
data := func(innerCtx context.Context) (io.ReadCloser, int64, error) {
|
||||
f, err := form.Image.Open()
|
||||
|
@ -335,7 +335,7 @@ func (p *Processor) emojiUpdateCopy(ctx context.Context, emoji *gtsmodel.Emoji,
|
|||
return nil, gtserror.NewErrorInternalError(err)
|
||||
}
|
||||
|
||||
newEmojiURI := uris.GenerateURIForEmoji(newEmojiID)
|
||||
newEmojiURI := uris.URIForEmoji(newEmojiID)
|
||||
|
||||
data := func(ctx context.Context) (reader io.ReadCloser, fileSize int64, err error) {
|
||||
rc, err := p.state.Storage.GetStream(ctx, emoji.ImagePath)
|
||||
|
|
|
@ -42,18 +42,18 @@ func (p *Processor) Create(ctx context.Context, account *gtsmodel.Account, form
|
|||
}
|
||||
|
||||
// process the media attachment and load it immediately
|
||||
media, err := p.mediaManager.PreProcessMedia(ctx, data, account.ID, &media.AdditionalMediaInfo{
|
||||
media := p.mediaManager.PreProcessMedia(data, account.ID, &media.AdditionalMediaInfo{
|
||||
Description: &form.Description,
|
||||
FocusX: &focusX,
|
||||
FocusY: &focusY,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, gtserror.NewErrorUnprocessableEntity(err)
|
||||
}
|
||||
|
||||
attachment, err := media.LoadAttachment(ctx)
|
||||
if err != nil {
|
||||
return nil, gtserror.NewErrorUnprocessableEntity(err)
|
||||
return nil, gtserror.NewErrorUnprocessableEntity(err, err.Error())
|
||||
} else if attachment.Type == gtsmodel.FileTypeUnknown {
|
||||
err = gtserror.Newf("could not process uploaded file with extension %s", attachment.File.ContentType)
|
||||
return nil, gtserror.NewErrorUnprocessableEntity(err, err.Error())
|
||||
}
|
||||
|
||||
apiAttachment, err := p.converter.AttachmentToAPIAttachment(ctx, attachment)
|
||||
|
|
|
@ -23,17 +23,24 @@ import (
|
|||
"io"
|
||||
"net/url"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
apimodel "github.com/superseriousbusiness/gotosocial/internal/api/model"
|
||||
"github.com/superseriousbusiness/gotosocial/internal/gtscontext"
|
||||
"github.com/superseriousbusiness/gotosocial/internal/gtserror"
|
||||
"github.com/superseriousbusiness/gotosocial/internal/gtsmodel"
|
||||
"github.com/superseriousbusiness/gotosocial/internal/media"
|
||||
"github.com/superseriousbusiness/gotosocial/internal/storage"
|
||||
"github.com/superseriousbusiness/gotosocial/internal/uris"
|
||||
)
|
||||
|
||||
// GetFile retrieves a file from storage and streams it back to the caller via an io.reader embedded in *apimodel.Content.
|
||||
func (p *Processor) GetFile(ctx context.Context, requestingAccount *gtsmodel.Account, form *apimodel.GetContentRequestForm) (*apimodel.Content, gtserror.WithCode) {
|
||||
// GetFile retrieves a file from storage and streams it back
|
||||
// to the caller via an io.Reader embedded in *apimodel.Content.
|
||||
func (p *Processor) GetFile(
|
||||
ctx context.Context,
|
||||
requestingAccount *gtsmodel.Account,
|
||||
form *apimodel.GetContentRequestForm,
|
||||
) (*apimodel.Content, gtserror.WithCode) {
|
||||
// parse the form fields
|
||||
mediaSize, err := parseSize(form.MediaSize)
|
||||
if err != nil {
|
||||
|
@ -118,11 +125,35 @@ func (p *Processor) getAttachmentContent(ctx context.Context, requestingAccount
|
|||
// retrieve attachment from the database and do basic checks on it
|
||||
a, err := p.state.DB.GetAttachmentByID(ctx, wantedMediaID)
|
||||
if err != nil {
|
||||
return nil, gtserror.NewErrorNotFound(fmt.Errorf("attachment %s could not be taken from the db: %w", wantedMediaID, err))
|
||||
err = gtserror.Newf("attachment %s could not be taken from the db: %w", wantedMediaID, err)
|
||||
return nil, gtserror.NewErrorNotFound(err)
|
||||
}
|
||||
|
||||
if a.AccountID != owningAccountID {
|
||||
return nil, gtserror.NewErrorNotFound(fmt.Errorf("attachment %s is not owned by %s", wantedMediaID, owningAccountID))
|
||||
err = gtserror.Newf("attachment %s is not owned by %s", wantedMediaID, owningAccountID)
|
||||
return nil, gtserror.NewErrorNotFound(err)
|
||||
}
|
||||
|
||||
// If this is an "Unknown" file type, i.e., one we
|
||||
// tried to process and couldn't, or one we refused
|
||||
// to process because it wasn't supported, then we
|
||||
// can skip a lot of steps here by simply forwarding
|
||||
// the request to the remote URL.
|
||||
if a.Type == gtsmodel.FileTypeUnknown {
|
||||
remoteURL, err := url.Parse(a.RemoteURL)
|
||||
if err != nil {
|
||||
err = gtserror.Newf("error parsing remote URL of 'Unknown'-type attachment for redirection: %w", err)
|
||||
return nil, gtserror.NewErrorInternalError(err)
|
||||
}
|
||||
|
||||
url := &storage.PresignedURL{
|
||||
URL: remoteURL,
|
||||
// We might manage to cache the media
|
||||
// at some point, so set a low-ish expiry.
|
||||
Expiry: time.Now().Add(2 * time.Hour),
|
||||
}
|
||||
|
||||
return &apimodel.Content{URL: url}, nil
|
||||
}
|
||||
|
||||
if !*a.Cached {
|
||||
|
@ -205,7 +236,7 @@ func (p *Processor) getEmojiContent(ctx context.Context, fileName string, owning
|
|||
// for using the static URL rather than full size url
|
||||
// is that static emojis are always encoded as png,
|
||||
// so this is more reliable than using full size url
|
||||
imageStaticURL := uris.GenerateURIForAttachment(
|
||||
imageStaticURL := uris.URIForAttachment(
|
||||
owningAccountID,
|
||||
string(media.TypeEmoji),
|
||||
string(media.SizeStatic),
|
||||
|
|
|
@ -36,8 +36,12 @@ func (p *Processor) Get(ctx context.Context, requestingAccount *gtsmodel.Account
|
|||
return p.c.GetAPIStatus(ctx, requestingAccount, targetStatus)
|
||||
}
|
||||
|
||||
// ContextGet returns the context (previous and following posts) from the given status ID.
|
||||
func (p *Processor) ContextGet(ctx context.Context, requestingAccount *gtsmodel.Account, targetStatusID string) (*apimodel.Context, gtserror.WithCode) {
|
||||
func (p *Processor) contextGet(
|
||||
ctx context.Context,
|
||||
requestingAccount *gtsmodel.Account,
|
||||
targetStatusID string,
|
||||
convert func(context.Context, *gtsmodel.Status, *gtsmodel.Account) (*apimodel.Status, error),
|
||||
) (*apimodel.Context, gtserror.WithCode) {
|
||||
targetStatus, errWithCode := p.c.GetVisibleTargetStatus(ctx, requestingAccount, targetStatusID)
|
||||
if errWithCode != nil {
|
||||
return nil, errWithCode
|
||||
|
@ -55,7 +59,7 @@ func (p *Processor) ContextGet(ctx context.Context, requestingAccount *gtsmodel.
|
|||
|
||||
for _, status := range parents {
|
||||
if v, err := p.filter.StatusVisible(ctx, requestingAccount, status); err == nil && v {
|
||||
apiStatus, err := p.converter.StatusToAPIStatus(ctx, status, requestingAccount)
|
||||
apiStatus, err := convert(ctx, status, requestingAccount)
|
||||
if err == nil {
|
||||
context.Ancestors = append(context.Ancestors, *apiStatus)
|
||||
}
|
||||
|
@ -73,7 +77,7 @@ func (p *Processor) ContextGet(ctx context.Context, requestingAccount *gtsmodel.
|
|||
|
||||
for _, status := range children {
|
||||
if v, err := p.filter.StatusVisible(ctx, requestingAccount, status); err == nil && v {
|
||||
apiStatus, err := p.converter.StatusToAPIStatus(ctx, status, requestingAccount)
|
||||
apiStatus, err := convert(ctx, status, requestingAccount)
|
||||
if err == nil {
|
||||
context.Descendants = append(context.Descendants, *apiStatus)
|
||||
}
|
||||
|
@ -82,3 +86,16 @@ func (p *Processor) ContextGet(ctx context.Context, requestingAccount *gtsmodel.
|
|||
|
||||
return context, nil
|
||||
}
|
||||
|
||||
// ContextGet returns the context (previous and following posts) from the given status ID.
|
||||
func (p *Processor) ContextGet(ctx context.Context, requestingAccount *gtsmodel.Account, targetStatusID string) (*apimodel.Context, gtserror.WithCode) {
|
||||
return p.contextGet(ctx, requestingAccount, targetStatusID, p.converter.StatusToAPIStatus)
|
||||
}
|
||||
|
||||
// WebContextGet is like ContextGet, but is explicitly
|
||||
// for viewing statuses via the unauthenticated web UI.
|
||||
//
|
||||
// TODO: a more advanced threading model could be implemented here.
|
||||
func (p *Processor) WebContextGet(ctx context.Context, targetStatusID string) (*apimodel.Context, gtserror.WithCode) {
|
||||
return p.contextGet(ctx, nil, targetStatusID, p.converter.StatusToWebStatus)
|
||||
}
|
||||
|
|
|
@ -319,7 +319,7 @@ func (cr *customRenderer) handleHashtag(text string) string {
|
|||
// `<a href="https://example.org/tags/somehashtag" class="mention hashtag" rel="tag">#<span>SomeHashtag</span></a>`
|
||||
var b strings.Builder
|
||||
b.WriteString(`<a href="`)
|
||||
b.WriteString(uris.GenerateURIForTag(normalized))
|
||||
b.WriteString(uris.URIForTag(normalized))
|
||||
b.WriteString(`" class="mention hashtag" rel="tag">#<span>`)
|
||||
b.WriteString(normalized)
|
||||
b.WriteString(`</span></a>`)
|
||||
|
|
|
@ -40,7 +40,7 @@ func (suite *PruneTestSuite) TestPrune() {
|
|||
|
||||
pruned, err := suite.state.Timelines.Home.Prune(ctx, testAccountID, desiredPreparedItemsLength, desiredIndexedItemsLength)
|
||||
suite.NoError(err)
|
||||
suite.Equal(15, pruned)
|
||||
suite.Equal(16, pruned)
|
||||
suite.Equal(5, suite.state.Timelines.Home.GetIndexedLength(ctx, testAccountID))
|
||||
}
|
||||
|
||||
|
@ -56,7 +56,7 @@ func (suite *PruneTestSuite) TestPruneTwice() {
|
|||
|
||||
pruned, err := suite.state.Timelines.Home.Prune(ctx, testAccountID, desiredPreparedItemsLength, desiredIndexedItemsLength)
|
||||
suite.NoError(err)
|
||||
suite.Equal(15, pruned)
|
||||
suite.Equal(16, pruned)
|
||||
suite.Equal(5, suite.state.Timelines.Home.GetIndexedLength(ctx, testAccountID))
|
||||
|
||||
// Prune same again, nothing should be pruned this time.
|
||||
|
@ -78,7 +78,7 @@ func (suite *PruneTestSuite) TestPruneTo0() {
|
|||
|
||||
pruned, err := suite.state.Timelines.Home.Prune(ctx, testAccountID, desiredPreparedItemsLength, desiredIndexedItemsLength)
|
||||
suite.NoError(err)
|
||||
suite.Equal(20, pruned)
|
||||
suite.Equal(21, pruned)
|
||||
suite.Equal(0, suite.state.Timelines.Home.GetIndexedLength(ctx, testAccountID))
|
||||
}
|
||||
|
||||
|
@ -95,7 +95,7 @@ func (suite *PruneTestSuite) TestPruneToInfinityAndBeyond() {
|
|||
pruned, err := suite.state.Timelines.Home.Prune(ctx, testAccountID, desiredPreparedItemsLength, desiredIndexedItemsLength)
|
||||
suite.NoError(err)
|
||||
suite.Equal(0, pruned)
|
||||
suite.Equal(20, suite.state.Timelines.Home.GetIndexedLength(ctx, testAccountID))
|
||||
suite.Equal(21, suite.state.Timelines.Home.GetIndexedLength(ctx, testAccountID))
|
||||
}
|
||||
|
||||
func TestPruneTestSuite(t *testing.T) {
|
||||
|
|
|
@ -954,7 +954,7 @@ func (c *Converter) TagToAS(ctx context.Context, t *gtsmodel.Tag) (vocab.TootHas
|
|||
// This is probably already lowercase,
|
||||
// but let's err on the safe side.
|
||||
nameLower := strings.ToLower(t.Name)
|
||||
tagURLString := uris.GenerateURIForTag(nameLower)
|
||||
tagURLString := uris.URIForTag(nameLower)
|
||||
|
||||
// Create the tag.
|
||||
tag := streams.NewTootHashtag()
|
||||
|
|
|
@ -434,11 +434,14 @@ func (c *Converter) AppToAPIAppPublic(ctx context.Context, a *gtsmodel.Applicati
|
|||
// AttachmentToAPIAttachment converts a gts model media attachment into its api representation for serialization on the API.
|
||||
func (c *Converter) AttachmentToAPIAttachment(ctx context.Context, a *gtsmodel.MediaAttachment) (apimodel.Attachment, error) {
|
||||
apiAttachment := apimodel.Attachment{
|
||||
ID: a.ID,
|
||||
Type: strings.ToLower(string(a.Type)),
|
||||
TextURL: a.URL,
|
||||
PreviewURL: a.Thumbnail.URL,
|
||||
Meta: apimodel.MediaMeta{
|
||||
ID: a.ID,
|
||||
Type: strings.ToLower(string(a.Type)),
|
||||
}
|
||||
|
||||
// Don't try to serialize meta for
|
||||
// unknown attachments, there's no point.
|
||||
if a.Type != gtsmodel.FileTypeUnknown {
|
||||
apiAttachment.Meta = &apimodel.MediaMeta{
|
||||
Original: apimodel.MediaDimensions{
|
||||
Width: a.FileMeta.Original.Width,
|
||||
Height: a.FileMeta.Original.Height,
|
||||
|
@ -449,13 +452,20 @@ func (c *Converter) AttachmentToAPIAttachment(ctx context.Context, a *gtsmodel.M
|
|||
Size: strconv.Itoa(a.FileMeta.Small.Width) + "x" + strconv.Itoa(a.FileMeta.Small.Height),
|
||||
Aspect: float32(a.FileMeta.Small.Aspect),
|
||||
},
|
||||
},
|
||||
Blurhash: a.Blurhash,
|
||||
}
|
||||
}
|
||||
|
||||
if i := a.Blurhash; i != "" {
|
||||
apiAttachment.Blurhash = &i
|
||||
}
|
||||
|
||||
// nullable fields
|
||||
if i := a.URL; i != "" {
|
||||
apiAttachment.URL = &i
|
||||
apiAttachment.TextURL = &i
|
||||
}
|
||||
|
||||
if i := a.Thumbnail.URL; i != "" {
|
||||
apiAttachment.PreviewURL = &i
|
||||
}
|
||||
|
||||
if i := a.RemoteURL; i != "" {
|
||||
|
@ -470,8 +480,9 @@ func (c *Converter) AttachmentToAPIAttachment(ctx context.Context, a *gtsmodel.M
|
|||
apiAttachment.Description = &i
|
||||
}
|
||||
|
||||
// type specific fields
|
||||
// Type-specific fields.
|
||||
switch a.Type {
|
||||
|
||||
case gtsmodel.FileTypeImage:
|
||||
apiAttachment.Meta.Original.Size = strconv.Itoa(a.FileMeta.Original.Width) + "x" + strconv.Itoa(a.FileMeta.Original.Height)
|
||||
apiAttachment.Meta.Original.Aspect = float32(a.FileMeta.Original.Aspect)
|
||||
|
@ -479,16 +490,17 @@ func (c *Converter) AttachmentToAPIAttachment(ctx context.Context, a *gtsmodel.M
|
|||
X: a.FileMeta.Focus.X,
|
||||
Y: a.FileMeta.Focus.Y,
|
||||
}
|
||||
|
||||
case gtsmodel.FileTypeVideo:
|
||||
if i := a.FileMeta.Original.Duration; i != nil {
|
||||
apiAttachment.Meta.Original.Duration = *i
|
||||
}
|
||||
|
||||
if i := a.FileMeta.Original.Framerate; i != nil {
|
||||
// the masto api expects this as a string in
|
||||
// the format `integer/1`, so 30fps is `30/1`
|
||||
// The masto api expects this as a string in
|
||||
// the format `integer/1`, so 30fps is `30/1`.
|
||||
round := math.Round(float64(*i))
|
||||
fr := strconv.FormatInt(int64(round), 10)
|
||||
fr := strconv.Itoa(int(round))
|
||||
apiAttachment.Meta.Original.FrameRate = fr + "/1"
|
||||
}
|
||||
|
||||
|
@ -599,7 +611,7 @@ func (c *Converter) EmojiCategoryToAPIEmojiCategory(ctx context.Context, categor
|
|||
func (c *Converter) TagToAPITag(ctx context.Context, t *gtsmodel.Tag, stubHistory bool) (apimodel.Tag, error) {
|
||||
return apimodel.Tag{
|
||||
Name: strings.ToLower(t.Name),
|
||||
URL: uris.GenerateURIForTag(t.Name),
|
||||
URL: uris.URIForTag(t.Name),
|
||||
History: func() *[]any {
|
||||
if !stubHistory {
|
||||
return nil
|
||||
|
@ -611,15 +623,56 @@ func (c *Converter) TagToAPITag(ctx context.Context, t *gtsmodel.Tag, stubHistor
|
|||
}, nil
|
||||
}
|
||||
|
||||
// StatusToAPIStatus converts a gts model status into its api (frontend) representation for serialization on the API.
|
||||
// StatusToAPIStatus converts a gts model status into its api
|
||||
// (frontend) representation for serialization on the API.
|
||||
//
|
||||
// Requesting account can be nil.
|
||||
func (c *Converter) StatusToAPIStatus(ctx context.Context, s *gtsmodel.Status, requestingAccount *gtsmodel.Account) (*apimodel.Status, error) {
|
||||
func (c *Converter) StatusToAPIStatus(
|
||||
ctx context.Context,
|
||||
s *gtsmodel.Status,
|
||||
requestingAccount *gtsmodel.Account,
|
||||
) (*apimodel.Status, error) {
|
||||
apiStatus, err := c.statusToFrontend(ctx, s, requestingAccount)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Normalize status for the API by pruning
|
||||
// out unknown attachment types and replacing
|
||||
// them with a helpful message.
|
||||
var aside string
|
||||
aside, apiStatus.MediaAttachments = placeholdUnknownAttachments(apiStatus.MediaAttachments)
|
||||
apiStatus.Content += aside
|
||||
|
||||
return apiStatus, nil
|
||||
}
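The placeholdUnknownAttachments helper is not shown in this hunk. Below is a purely hypothetical sketch of a function with the same shape (an HTML aside plus the pruned attachment slice), to illustrate the normalization step: the field and type names are taken from the converter code in this commit, but the body is an assumption, not the repository's implementation.

// Hypothetical sketch, NOT the actual implementation: split off
// attachments of type "unknown" and return a short HTML aside
// pointing readers at the original remote files.
func placeholdUnknownAttachmentsSketch(in []apimodel.Attachment) (string, []apimodel.Attachment) {
	kept := make([]apimodel.Attachment, 0, len(in))
	missing := []string{}

	for _, a := range in {
		if a.Type != "unknown" {
			// Processable attachment; keep it in the list.
			kept = append(kept, a)
			continue
		}
		if a.RemoteURL != nil {
			// Unknown type; remember its remote original.
			missing = append(missing, *a.RemoteURL)
		}
	}

	if len(missing) == 0 {
		return "", kept
	}

	// strings.Join requires the standard library "strings" import.
	aside := "<aside>Not all attachments could be processed; originals: " + strings.Join(missing, ", ") + "</aside>"
	return aside, kept
}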
|
||||
|
||||
// StatusToWebStatus converts a gts model status into an
|
||||
// api representation suitable for serving into a web template.
|
||||
//
|
||||
// Requesting account can be nil.
|
||||
func (c *Converter) StatusToWebStatus(
|
||||
ctx context.Context,
|
||||
s *gtsmodel.Status,
|
||||
requestingAccount *gtsmodel.Account,
|
||||
) (*apimodel.Status, error) {
|
||||
return c.statusToFrontend(ctx, s, requestingAccount)
|
||||
}
|
||||
|
||||
// statusToFrontend is a package internal function for
|
||||
// parsing a status into its initial frontend representation.
|
||||
//
|
||||
// Requesting account can be nil.
|
||||
func (c *Converter) statusToFrontend(
|
||||
ctx context.Context,
|
||||
s *gtsmodel.Status,
|
||||
requestingAccount *gtsmodel.Account,
|
||||
) (*apimodel.Status, error) {
|
||||
if err := c.state.DB.PopulateStatus(ctx, s); err != nil {
|
||||
// Ensure author account present + correct;
|
||||
// can't really go further without this!
|
||||
if s.Account == nil {
|
||||
return nil, fmt.Errorf("error(s) populating status, cannot continue: %w", err)
|
||||
return nil, gtserror.Newf("error(s) populating status, cannot continue: %w", err)
|
||||
}
|
||||
|
||||
log.Errorf(ctx, "error(s) populating status, will continue: %v", err)
|
||||
|
@ -627,22 +680,22 @@ func (c *Converter) StatusToAPIStatus(ctx context.Context, s *gtsmodel.Status, r
|
|||
|
||||
apiAuthorAccount, err := c.AccountToAPIAccountPublic(ctx, s.Account)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error converting status author: %w", err)
|
||||
return nil, gtserror.Newf("error converting status author: %w", err)
|
||||
}
|
||||
|
||||
repliesCount, err := c.state.DB.CountStatusReplies(ctx, s.ID)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error counting replies: %w", err)
|
||||
return nil, gtserror.Newf("error counting replies: %w", err)
|
||||
}
|
||||
|
||||
reblogsCount, err := c.state.DB.CountStatusBoosts(ctx, s.ID)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error counting reblogs: %w", err)
|
||||
return nil, gtserror.Newf("error counting reblogs: %w", err)
|
||||
}
|
||||
|
||||
favesCount, err := c.state.DB.CountStatusFaves(ctx, s.ID)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error counting faves: %w", err)
|
||||
return nil, gtserror.Newf("error counting faves: %w", err)
|
||||
}
|
||||
|
||||
interacts, err := c.interactionsWithStatusForAccount(ctx, s, requestingAccount)
|
||||
|
@ -722,7 +775,7 @@ func (c *Converter) StatusToAPIStatus(ctx context.Context, s *gtsmodel.Status, r
|
|||
if s.BoostOf != nil {
|
||||
apiBoostOf, err := c.StatusToAPIStatus(ctx, s.BoostOf, requestingAccount)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error converting boosted status: %w", err)
|
||||
return nil, gtserror.Newf("error converting boosted status: %w", err)
|
||||
}
|
||||
|
||||
apiStatus.Reblog = &apimodel.StatusReblogged{Status: apiBoostOf}
|
||||
|
@ -733,13 +786,13 @@ func (c *Converter) StatusToAPIStatus(ctx context.Context, s *gtsmodel.Status, r
|
|||
if app == nil {
|
||||
app, err = c.state.DB.GetApplicationByID(ctx, appID)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error getting application %s: %w", appID, err)
|
||||
return nil, gtserror.Newf("error getting application %s: %w", appID, err)
|
||||
}
|
||||
}
|
||||
|
||||
apiApp, err := c.AppToAPIAppPublic(ctx, app)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error converting application %s: %w", appID, err)
|
||||
return nil, gtserror.Newf("error converting application %s: %w", appID, err)
|
||||
}
|
||||
|
||||
apiStatus.Application = apiApp
|
||||
|
@ -757,11 +810,9 @@ func (c *Converter) StatusToAPIStatus(ctx context.Context, s *gtsmodel.Status, r
|
|||
}
|
||||
}
|
||||
|
||||
// Normalization.
|
||||
|
||||
// If web URL is empty for whatever
|
||||
// reason, provide AP URI as fallback.
|
||||
if s.URL == "" {
|
||||
// URL was empty for some reason;
|
||||
// provide AP URI as fallback.
|
||||
s.URL = s.URI
|
||||
}
|
||||
|
||||
|
|
|
@ -344,7 +344,7 @@ func (suite *InternalToFrontendTestSuite) TestStatusToFrontend() {
|
|||
"language": "en",
|
||||
"uri": "http://localhost:8080/users/admin/statuses/01F8MH75CBF9JFX4ZAD54N0W0R",
|
||||
"url": "http://localhost:8080/@admin/statuses/01F8MH75CBF9JFX4ZAD54N0W0R",
|
||||
"replies_count": 0,
|
||||
"replies_count": 1,
|
||||
"reblogs_count": 0,
|
||||
"favourites_count": 1,
|
||||
"favourited": true,
|
||||
|
@ -437,6 +437,105 @@ func (suite *InternalToFrontendTestSuite) TestStatusToFrontend() {
|
|||
}`, string(b))
|
||||
}
|
||||
|
||||
func (suite *InternalToFrontendTestSuite) TestStatusToFrontendUnknownAttachments() {
|
||||
testStatus := suite.testStatuses["remote_account_2_status_1"]
|
||||
requestingAccount := suite.testAccounts["admin_account"]
|
||||
|
||||
apiStatus, err := suite.typeconverter.StatusToAPIStatus(context.Background(), testStatus, requestingAccount)
|
||||
suite.NoError(err)
|
||||
|
||||
b, err := json.MarshalIndent(apiStatus, "", " ")
|
||||
suite.NoError(err)
|
||||
|
||||
suite.Equal(`{
|
||||
"id": "01HE7XJ1CG84TBKH5V9XKBVGF5",
|
||||
"created_at": "2023-11-02T10:44:25.000Z",
|
||||
"in_reply_to_id": "01F8MH75CBF9JFX4ZAD54N0W0R",
|
||||
"in_reply_to_account_id": "01F8MH17FWEB39HZJ76B6VXSKF",
|
||||
"sensitive": false,
|
||||
"spoiler_text": "",
|
||||
"visibility": "public",
|
||||
"language": "en",
|
||||
"uri": "http://example.org/users/Some_User/statuses/01HE7XJ1CG84TBKH5V9XKBVGF5",
|
||||
"url": "http://example.org/@Some_User/statuses/01HE7XJ1CG84TBKH5V9XKBVGF5",
|
||||
"replies_count": 0,
|
||||
"reblogs_count": 0,
|
||||
"favourites_count": 0,
|
||||
"favourited": false,
|
||||
"reblogged": false,
|
||||
"muted": false,
|
||||
"bookmarked": false,
|
||||
"pinned": false,
|
||||
"content": "\u003cp\u003ehi \u003cspan class=\"h-card\"\u003e\u003ca href=\"http://localhost:8080/@admin\" class=\"u-url mention\" rel=\"nofollow noreferrer noopener\" target=\"_blank\"\u003e@\u003cspan\u003eadmin\u003c/span\u003e\u003c/a\u003e\u003c/span\u003e here's some media for ya\u003c/p\u003e\u003caside\u003e\u003cp\u003eNote from localhost:8080: 2 attachments in this status could not be downloaded. Treat the following external links with care:\u003cul\u003e\u003cli\u003e\u003ca href=\"http://example.org/fileserver/01HE7Y659ZWZ02JM4AWYJZ176Q/attachment/original/01HE7ZGJYTSYMXF927GF9353KR.svg\" rel=\"nofollow noreferrer noopener\" target=\"_blank\"\u003e01HE7ZGJYTSYMXF927GF9353KR.svg\u003c/a\u003e [SVG line art of a sloth, public domain]\u003c/li\u003e\u003cli\u003e\u003ca href=\"http://example.org/fileserver/01HE7Y659ZWZ02JM4AWYJZ176Q/attachment/original/01HE892Y8ZS68TQCNPX7J888P3.mp3\" rel=\"nofollow noreferrer noopener\" target=\"_blank\"\u003e01HE892Y8ZS68TQCNPX7J888P3.mp3\u003c/a\u003e [Jolly salsa song, public domain.]\u003c/li\u003e\u003c/ul\u003e\u003c/p\u003e\u003c/aside\u003e",
|
||||
"reblog": null,
|
||||
"account": {
|
||||
"id": "01FHMQX3GAABWSM0S2VZEC2SWC",
|
||||
"username": "Some_User",
|
||||
"acct": "Some_User@example.org",
|
||||
"display_name": "some user",
|
||||
"locked": true,
|
||||
"discoverable": true,
|
||||
"bot": false,
|
||||
"created_at": "2020-08-10T12:13:28.000Z",
|
||||
"note": "i'm a real son of a gun",
|
||||
"url": "http://example.org/@Some_User",
|
||||
"avatar": "",
|
||||
"avatar_static": "",
|
||||
"header": "http://localhost:8080/assets/default_header.png",
|
||||
"header_static": "http://localhost:8080/assets/default_header.png",
|
||||
"followers_count": 0,
|
||||
"following_count": 0,
|
||||
"statuses_count": 1,
|
||||
"last_status_at": "2023-11-02T10:44:25.000Z",
|
||||
"emojis": [],
|
||||
"fields": []
|
||||
},
|
||||
"media_attachments": [
|
||||
{
|
||||
"id": "01HE7Y3C432WRSNS10EZM86SA5",
|
||||
"type": "image",
|
||||
"url": "http://localhost:8080/fileserver/01FHMQX3GAABWSM0S2VZEC2SWC/attachment/original/01HE7Y3C432WRSNS10EZM86SA5.jpg",
|
||||
"text_url": "http://localhost:8080/fileserver/01FHMQX3GAABWSM0S2VZEC2SWC/attachment/original/01HE7Y3C432WRSNS10EZM86SA5.jpg",
|
||||
"preview_url": "http://localhost:8080/fileserver/01FHMQX3GAABWSM0S2VZEC2SWC/attachment/small/01HE7Y3C432WRSNS10EZM86SA5.jpg",
|
||||
"remote_url": "http://example.org/fileserver/01HE7Y659ZWZ02JM4AWYJZ176Q/attachment/original/01HE7Y6G0EMCKST3Q0914WW0MS.jpg",
|
||||
"preview_remote_url": null,
|
||||
"meta": {
|
||||
"original": {
|
||||
"width": 3000,
|
||||
"height": 2000,
|
||||
"size": "3000x2000",
|
||||
"aspect": 1.5
|
||||
},
|
||||
"small": {
|
||||
"width": 512,
|
||||
"height": 341,
|
||||
"size": "512x341",
|
||||
"aspect": 1.5014663
|
||||
},
|
||||
"focus": {
|
||||
"x": 0,
|
||||
"y": 0
|
||||
}
|
||||
},
|
||||
"description": "Photograph of a sloth, Public Domain.",
|
||||
"blurhash": "LNEC{|w}0K9GsEtPM|j[NFbHoeof"
|
||||
}
|
||||
],
|
||||
"mentions": [
|
||||
{
|
||||
"id": "01F8MH17FWEB39HZJ76B6VXSKF",
|
||||
"username": "admin",
|
||||
"url": "http://localhost:8080/@admin",
|
||||
"acct": "admin"
|
||||
}
|
||||
],
|
||||
"tags": [],
|
||||
"emojis": [],
|
||||
"card": null,
|
||||
"poll": null
|
||||
}`, string(b))
|
||||
}
|
||||
|
||||
func (suite *InternalToFrontendTestSuite) TestStatusToFrontendUnknownLanguage() {
|
||||
testStatus := &gtsmodel.Status{}
|
||||
*testStatus = *suite.testStatuses["admin_account_status_1"]
|
||||
|
@ -459,7 +558,7 @@ func (suite *InternalToFrontendTestSuite) TestStatusToFrontendUnknownLanguage()
|
|||
"language": null,
|
||||
"uri": "http://localhost:8080/users/admin/statuses/01F8MH75CBF9JFX4ZAD54N0W0R",
|
||||
"url": "http://localhost:8080/@admin/statuses/01F8MH75CBF9JFX4ZAD54N0W0R",
|
||||
"replies_count": 0,
|
||||
"replies_count": 1,
|
||||
"reblogs_count": 0,
|
||||
"favourites_count": 1,
|
||||
"favourited": true,
|
||||
|
@ -583,7 +682,8 @@ func (suite *InternalToFrontendTestSuite) TestVideoAttachmentToFrontend() {
|
|||
"aspect": 1.7821782
|
||||
}
|
||||
},
|
||||
"description": "A cow adorably licking another cow!"
|
||||
"description": "A cow adorably licking another cow!",
|
||||
"blurhash": null
|
||||
}`, string(b))
|
||||
}
|
||||
|
||||
|
|
|
@ -22,10 +22,17 @@ import (
|
|||
"errors"
|
||||
"fmt"
|
||||
"net/url"
|
||||
"path"
|
||||
"slices"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/superseriousbusiness/gotosocial/internal/ap"
|
||||
apimodel "github.com/superseriousbusiness/gotosocial/internal/api/model"
|
||||
"github.com/superseriousbusiness/gotosocial/internal/config"
|
||||
"github.com/superseriousbusiness/gotosocial/internal/gtsmodel"
|
||||
"github.com/superseriousbusiness/gotosocial/internal/regexes"
|
||||
"github.com/superseriousbusiness/gotosocial/internal/text"
|
||||
)
|
||||
|
||||
type statusInteractions struct {
|
||||
|
@ -100,3 +107,80 @@ func getURI(withID ap.WithJSONLDId) (*url.URL, string, error) {
|
|||
id := idProp.Get()
|
||||
return id, id.String(), nil
|
||||
}
|
||||
|
||||
// placeholdUnknownAttachments separates any attachments with type `unknown`
|
||||
// out of the given slice, and returns an `<aside>` tag containing links to
|
||||
// those attachments, as well as the slice of remaining "known" attachments.
|
||||
// If there are no unknown-type attachments in the provided slice, an empty
|
||||
// string and the original slice will be returned.
|
||||
//
|
||||
// If an aside is created, it will be run through the sanitizer before being
|
||||
// returned, to ensure that malicious links don't cause issues.
|
||||
//
|
||||
// Example:
|
||||
//
|
||||
// <aside>
|
||||
// <p>Note from your.instance.com: 2 attachments in this status could not be downloaded. Treat the following external links with care:
|
||||
// <ul>
|
||||
// <li><a href="http://example.org/fileserver/01HE7Y659ZWZ02JM4AWYJZ176Q/attachment/original/01HE7ZGJYTSYMXF927GF9353KR.svg" rel="nofollow noreferrer noopener" target="_blank">01HE7ZGJYTSYMXF927GF9353KR.svg</a> [SVG line art of a sloth, public domain]</li>
|
||||
// <li><a href="http://example.org/fileserver/01HE7Y659ZWZ02JM4AWYJZ176Q/attachment/original/01HE892Y8ZS68TQCNPX7J888P3.mp3" rel="nofollow noreferrer noopener" target="_blank">01HE892Y8ZS68TQCNPX7J888P3.mp3</a> [Jolly salsa song, public domain.]</li>
|
||||
// </ul>
|
||||
// </p>
|
||||
// </aside>
|
||||
func placeholdUnknownAttachments(arr []apimodel.Attachment) (string, []apimodel.Attachment) {
|
||||
// Extract unknown-type attachments into a separate
|
||||
// slice, deleting them from arr in the process.
|
||||
var unknowns []apimodel.Attachment
|
||||
arr = slices.DeleteFunc(arr, func(elem apimodel.Attachment) bool {
|
||||
unknown := elem.Type == "unknown"
|
||||
if unknown {
|
||||
// Set aside unknown-type attachment.
|
||||
unknowns = append(unknowns, elem)
|
||||
}
|
||||
|
||||
return unknown
|
||||
})
|
||||
|
||||
unknownsLen := len(unknowns)
|
||||
if unknownsLen == 0 {
|
||||
// No unknown attachments,
|
||||
// nothing to do.
|
||||
return "", arr
|
||||
}
|
||||
|
||||
// Plural / singular.
|
||||
var (
|
||||
attachments string
|
||||
links string
|
||||
)
|
||||
|
||||
if unknownsLen == 1 {
|
||||
attachments = "1 attachment"
|
||||
links = "link"
|
||||
} else {
|
||||
attachments = strconv.Itoa(unknownsLen) + " attachments"
|
||||
links = "links"
|
||||
}
|
||||
|
||||
var aside strings.Builder
|
||||
aside.WriteString(`<aside>`)
|
||||
aside.WriteString(`<p>`)
|
||||
aside.WriteString(`Note from ` + config.GetHost() + `: ` + attachments + ` in this status could not be downloaded. Treat the following external ` + links + ` with care:`)
|
||||
aside.WriteString(`<ul>`)
|
||||
for _, a := range unknowns {
|
||||
var (
|
||||
remoteURL = *a.RemoteURL
|
||||
base = path.Base(remoteURL)
|
||||
entry = fmt.Sprintf(`<a href="%s">%s</a>`, remoteURL, base)
|
||||
)
|
||||
if d := a.Description; d != nil && *d != "" {
|
||||
entry += ` [` + *d + `]`
|
||||
}
|
||||
aside.WriteString(`<li>` + entry + `</li>`)
|
||||
}
|
||||
aside.WriteString(`</ul>`)
|
||||
aside.WriteString(`</p>`)
|
||||
aside.WriteString(`</aside>`)
|
||||
|
||||
return text.SanitizeToHTML(aside.String()), arr
|
||||
}
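As a usage sketch (the attachment values below are hypothetical; util.Ptr is the project's pointer helper), feeding one known and one unknown attachment through the helper yields a sanitized aside and only the known attachment:

attachments := []apimodel.Attachment{
	{Type: "image", RemoteURL: util.Ptr("https://files.example/photo.jpg")},
	{Type: "unknown", RemoteURL: util.Ptr("https://files.example/song.mp3")},
}

aside, known := placeholdUnknownAttachments(attachments)
// known now holds only the "image" attachment; aside is an
// <aside> block with a single <li> linking to song.mp3, which
// the converter appends to the status content.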
|
||||
|
|
|
@ -165,23 +165,79 @@ func GenerateURIsForAccount(username string) *UserURIs {
|
|||
}
|
||||
}
|
||||
|
||||
// GenerateURIForAttachment generates a URI for an attachment/emoji/header etc.
|
||||
// Will produced something like https://example.org/fileserver/01FPST95B8FC3HG3AGCDKPQNQ2/attachment/original/01FPST9QK4V5XWS3F9Z4F2G1X7.gif
|
||||
func GenerateURIForAttachment(accountID string, mediaType string, mediaSize string, mediaID string, extension string) string {
|
||||
protocol := config.GetProtocol()
|
||||
host := config.GetHost()
|
||||
return fmt.Sprintf("%s://%s/%s/%s/%s/%s/%s.%s", protocol, host, FileserverPath, accountID, mediaType, mediaSize, mediaID, extension)
|
||||
// URIForAttachment generates a URI for
|
||||
// an attachment/emoji/header etc.
|
||||
//
|
||||
// Will produce something like:
|
||||
//
|
||||
// "https://example.org/fileserver/01FPST95B8FC3HG3AGCDKPQNQ2/attachment/original/01FPST9QK4V5XWS3F9Z4F2G1X7.gif"
|
||||
func URIForAttachment(
|
||||
accountID string,
|
||||
mediaType string,
|
||||
mediaSize string,
|
||||
mediaID string,
|
||||
extension string,
|
||||
) string {
|
||||
const format = "%s://%s/%s/%s/%s/%s/%s.%s"
|
||||
|
||||
return fmt.Sprintf(
|
||||
format,
|
||||
config.GetProtocol(),
|
||||
config.GetHost(),
|
||||
FileserverPath,
|
||||
accountID,
|
||||
mediaType,
|
||||
mediaSize,
|
||||
mediaID,
|
||||
extension,
|
||||
)
|
||||
}
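For instance (assuming the instance is configured with protocol "https" and host "example.org", as in the doc comment), the renamed helper composes the path segments like so:

uri := uris.URIForAttachment(
	"01FPST95B8FC3HG3AGCDKPQNQ2", // account ID
	"attachment",                 // media type
	"original",                   // media size
	"01FPST9QK4V5XWS3F9Z4F2G1X7", // media ID
	"gif",                        // file extension
)
// uri == "https://example.org/fileserver/01FPST95B8FC3HG3AGCDKPQNQ2/attachment/original/01FPST9QK4V5XWS3F9Z4F2G1X7.gif"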
|
||||
|
||||
// GenerateURIForEmoji generates an activitypub uri for a new emoji.
|
||||
func GenerateURIForEmoji(emojiID string) string {
|
||||
protocol := config.GetProtocol()
|
||||
host := config.GetHost()
|
||||
return fmt.Sprintf("%s://%s/%s/%s", protocol, host, EmojiPath, emojiID)
|
||||
// StoragePathForAttachment generates a storage
|
||||
// path for an attachment/emoji/header etc.
|
||||
//
|
||||
// Will produce something like:
|
||||
//
|
||||
// "01FPST95B8FC3HG3AGCDKPQNQ2/attachment/original/01FPST9QK4V5XWS3F9Z4F2G1X7.gif"
|
||||
func StoragePathForAttachment(
|
||||
accountID string,
|
||||
mediaType string,
|
||||
mediaSize string,
|
||||
mediaID string,
|
||||
extension string,
|
||||
) string {
|
||||
const format = "%s/%s/%s/%s.%s"
|
||||
|
||||
return fmt.Sprintf(
|
||||
format,
|
||||
accountID,
|
||||
mediaType,
|
||||
mediaSize,
|
||||
mediaID,
|
||||
extension,
|
||||
)
|
||||
}
|
||||
|
||||
// GenerateURIForTag generates an activitypub uri for a tag.
|
||||
func GenerateURIForTag(name string) string {
|
||||
// URIForEmoji generates an
|
||||
// ActivityPub URI for an emoji.
|
||||
//
|
||||
// Will produce something like:
|
||||
//
|
||||
// "https://example.org/emoji/01FPST9QK4V5XWS3F9Z4F2G1X7"
|
||||
func URIForEmoji(emojiID string) string {
|
||||
const format = "%s://%s/%s/%s"
|
||||
|
||||
return fmt.Sprintf(
|
||||
format,
|
||||
config.GetProtocol(),
|
||||
config.GetHost(),
|
||||
EmojiPath,
|
||||
emojiID,
|
||||
)
|
||||
}
|
||||
|
||||
// URIForTag generates an activitypub uri for a tag.
|
||||
func URIForTag(name string) string {
|
||||
protocol := config.GetProtocol()
|
||||
host := config.GetHost()
|
||||
return fmt.Sprintf("%s://%s/%s/%s", protocol, host, TagsPath, strings.ToLower(name))
|
||||
|
|
|
@ -123,9 +123,14 @@ func (og *ogMeta) withStatus(status *apimodel.Status) *ogMeta {
|
|||
|
||||
if !status.Sensitive && len(status.MediaAttachments) > 0 {
|
||||
a := status.MediaAttachments[0]
|
||||
og.Image = a.PreviewURL
|
||||
|
||||
og.ImageWidth = strconv.Itoa(a.Meta.Small.Width)
|
||||
og.ImageHeight = strconv.Itoa(a.Meta.Small.Height)
|
||||
|
||||
if a.PreviewURL != nil {
|
||||
og.Image = *a.PreviewURL
|
||||
}
|
||||
|
||||
if a.Description != nil {
|
||||
og.ImageAlt = *a.Description
|
||||
}
|
||||
|
|
|
@ -125,7 +125,7 @@ func (m *Module) threadGETHandler(c *gin.Context) {
|
|||
}
|
||||
|
||||
// Fill in the rest of the thread context.
|
||||
context, errWithCode := m.processor.Status().ContextGet(ctx, authed.Account, targetStatusID)
|
||||
context, errWithCode := m.processor.Status().WebContextGet(ctx, targetStatusID)
|
||||
if errWithCode != nil {
|
||||
apiutil.WebErrorHandler(c, errWithCode, instanceGet)
|
||||
return
|
||||
|
|
BIN
testrig/media/sloth-original.jpg
Normal file
Binary file not shown.
After Width: | Height: | Size: 5.2 MiB |
BIN
testrig/media/sloth-small.jpg
Normal file
Binary file not shown.
After Width: | Height: | Size: 50 KiB |
|
@ -1114,6 +1114,113 @@ func NewTestAttachments() map[string]*gtsmodel.MediaAttachment {
|
|||
Header: util.Ptr(true),
|
||||
Cached: util.Ptr(true),
|
||||
},
|
||||
"remote_account_2_status_1_attachment_1": {
|
||||
ID: "01HE7Y3C432WRSNS10EZM86SA5",
|
||||
StatusID: "01HE7XJ1CG84TBKH5V9XKBVGF5",
|
||||
URL: "http://localhost:8080/fileserver/01FHMQX3GAABWSM0S2VZEC2SWC/attachment/original/01HE7Y3C432WRSNS10EZM86SA5.jpg",
|
||||
RemoteURL: "http://example.org/fileserver/01HE7Y659ZWZ02JM4AWYJZ176Q/attachment/original/01HE7Y6G0EMCKST3Q0914WW0MS.jpg",
|
||||
CreatedAt: TimeMustParse("2023-11-02T12:44:25+02:00"),
|
||||
UpdatedAt: TimeMustParse("2023-11-02T12:44:25+02:00"),
|
||||
Type: gtsmodel.FileTypeImage,
|
||||
FileMeta: gtsmodel.FileMeta{
|
||||
Original: gtsmodel.Original{
|
||||
Width: 3000,
|
||||
Height: 2000,
|
||||
Size: 6000000,
|
||||
Aspect: 1.5,
|
||||
},
|
||||
Small: gtsmodel.Small{
|
||||
Width: 512,
|
||||
Height: 341,
|
||||
Size: 174592,
|
||||
Aspect: 1.5014663,
|
||||
},
|
||||
Focus: gtsmodel.Focus{
|
||||
X: 0,
|
||||
Y: 0,
|
||||
},
|
||||
},
|
||||
AccountID: "01FHMQX3GAABWSM0S2VZEC2SWC",
|
||||
Description: "Photograph of a sloth, Public Domain.",
|
||||
Blurhash: "LNEC{|w}0K9GsEtPM|j[NFbHoeof",
|
||||
Processing: 2,
|
||||
File: gtsmodel.File{
|
||||
Path: "01FHMQX3GAABWSM0S2VZEC2SWC/attachment/original/01HE7Y3C432WRSNS10EZM86SA5.jpg",
|
||||
ContentType: "image/jpg",
|
||||
FileSize: 5450054,
|
||||
UpdatedAt: TimeMustParse("2023-11-02T12:44:25+02:00"),
|
||||
},
|
||||
Thumbnail: gtsmodel.Thumbnail{
|
||||
Path: "01FHMQX3GAABWSM0S2VZEC2SWC/attachment/small/01HE7Y3C432WRSNS10EZM86SA5.jpg",
|
||||
ContentType: "image/jpeg",
|
||||
FileSize: 50820,
|
||||
UpdatedAt: TimeMustParse("2023-11-02T12:44:25+02:00"),
|
||||
URL: "http://localhost:8080/fileserver/01FHMQX3GAABWSM0S2VZEC2SWC/attachment/small/01HE7Y3C432WRSNS10EZM86SA5.jpg",
|
||||
},
|
||||
Avatar: util.Ptr(false),
|
||||
Header: util.Ptr(false),
|
||||
Cached: util.Ptr(true),
|
||||
},
|
||||
"remote_account_2_status_1_attachment_2": {
|
||||
ID: "01HE7ZFX9GKA5ZZVD4FACABSS9",
|
||||
StatusID: "01HE7XJ1CG84TBKH5V9XKBVGF5",
|
||||
URL: "http://localhost:8080/fileserver/01FHMQX3GAABWSM0S2VZEC2SWC/attachment/original/01HE7ZFX9GKA5ZZVD4FACABSS9.svg",
|
||||
RemoteURL: "http://example.org/fileserver/01HE7Y659ZWZ02JM4AWYJZ176Q/attachment/original/01HE7ZGJYTSYMXF927GF9353KR.svg",
|
||||
CreatedAt: TimeMustParse("2023-11-02T12:44:25+02:00"),
|
||||
UpdatedAt: TimeMustParse("2023-11-02T12:44:25+02:00"),
|
||||
Type: gtsmodel.FileTypeUnknown,
|
||||
FileMeta: gtsmodel.FileMeta{},
|
||||
AccountID: "01FHMQX3GAABWSM0S2VZEC2SWC",
|
||||
Description: "SVG line art of a sloth, public domain",
|
||||
Blurhash: "L26*j+~qE1RP?wxut7ofRlM{R*of",
|
||||
Processing: 2,
|
||||
File: gtsmodel.File{
|
||||
Path: "01FHMQX3GAABWSM0S2VZEC2SWC/attachment/original/01HE7ZFX9GKA5ZZVD4FACABSS9.svg",
|
||||
ContentType: "image/svg",
|
||||
FileSize: 147819,
|
||||
UpdatedAt: TimeMustParse("2023-11-02T12:44:25+02:00"),
|
||||
},
|
||||
Thumbnail: gtsmodel.Thumbnail{
|
||||
Path: "01FHMQX3GAABWSM0S2VZEC2SWC/attachment/small/01HE7ZFX9GKA5ZZVD4FACABSS9.jpg",
|
||||
ContentType: "image/jpeg",
|
||||
FileSize: 0,
|
||||
UpdatedAt: TimeMustParse("2023-11-02T12:44:25+02:00"),
|
||||
URL: "http://localhost:8080/fileserver/01FHMQX3GAABWSM0S2VZEC2SWC/attachment/small/01HE7ZFX9GKA5ZZVD4FACABSS9.jpg",
|
||||
},
|
||||
Avatar: util.Ptr(false),
|
||||
Header: util.Ptr(false),
|
||||
Cached: util.Ptr(false),
|
||||
},
|
||||
"remote_account_2_status_1_attachment_3": {
|
||||
ID: "01HE88YG74PVAB81PX2XA9F3FG",
|
||||
StatusID: "01HE7XJ1CG84TBKH5V9XKBVGF5",
|
||||
URL: "http://localhost:8080/fileserver/01FHMQX3GAABWSM0S2VZEC2SWC/attachment/original/01HE88YG74PVAB81PX2XA9F3FG.mp3",
|
||||
RemoteURL: "http://example.org/fileserver/01HE7Y659ZWZ02JM4AWYJZ176Q/attachment/original/01HE892Y8ZS68TQCNPX7J888P3.mp3",
|
||||
CreatedAt: TimeMustParse("2023-11-02T12:44:25+02:00"),
|
||||
UpdatedAt: TimeMustParse("2023-11-02T12:44:25+02:00"),
|
||||
Type: gtsmodel.FileTypeUnknown,
|
||||
FileMeta: gtsmodel.FileMeta{},
|
||||
AccountID: "01FHMQX3GAABWSM0S2VZEC2SWC",
|
||||
Description: "Jolly salsa song, public domain.",
|
||||
Blurhash: "",
|
||||
Processing: 2,
|
||||
File: gtsmodel.File{
|
||||
Path: "01FHMQX3GAABWSM0S2VZEC2SWC/attachment/original/01HE88YG74PVAB81PX2XA9F3FG.mp3",
|
||||
ContentType: "audio/mpeg",
|
||||
FileSize: 147819,
|
||||
UpdatedAt: TimeMustParse("2023-11-02T12:44:25+02:00"),
|
||||
},
|
||||
Thumbnail: gtsmodel.Thumbnail{
|
||||
Path: "01FHMQX3GAABWSM0S2VZEC2SWC/attachment/small/01HE88YG74PVAB81PX2XA9F3FG.jpg",
|
||||
ContentType: "image/jpeg",
|
||||
FileSize: 0,
|
||||
UpdatedAt: TimeMustParse("2023-11-02T12:44:25+02:00"),
|
||||
URL: "http://localhost:8080/fileserver/01FHMQX3GAABWSM0S2VZEC2SWC/attachment/small/01HE88YG74PVAB81PX2XA9F3FG.jpg",
|
||||
},
|
||||
Avatar: util.Ptr(false),
|
||||
Header: util.Ptr(false),
|
||||
Cached: util.Ptr(false),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1279,6 +1386,10 @@ func newTestStoredAttachments() map[string]filenames {
|
|||
Original: "thoughtsofdog-original.jpg",
|
||||
Small: "thoughtsofdog-small.jpg",
|
||||
},
|
||||
"remote_account_2_status_1_attachment_1": {
|
||||
Original: "sloth-original.jpg",
|
||||
Small: "sloth-small.jpg",
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1830,6 +1941,33 @@ func NewTestStatuses() map[string]*gtsmodel.Status {
|
|||
ActivityStreamsType: ap.ActivityQuestion,
|
||||
PollID: "01HEN2R65468ZG657C4ZPHJ4EX",
|
||||
},
|
||||
"remote_account_2_status_1": {
|
||||
ID: "01HE7XJ1CG84TBKH5V9XKBVGF5",
|
||||
URI: "http://example.org/users/Some_User/statuses/01HE7XJ1CG84TBKH5V9XKBVGF5",
|
||||
URL: "http://example.org/@Some_User/statuses/01HE7XJ1CG84TBKH5V9XKBVGF5",
|
||||
Content: `<p>hi <span class="h-card"><a href="http://localhost:8080/@admin" class="u-url mention" rel="nofollow noreferrer noopener" target="_blank">@<span>admin</span></a></span> here's some media for ya</p>`,
|
||||
AttachmentIDs: []string{"01HE7Y3C432WRSNS10EZM86SA5", "01HE7ZFX9GKA5ZZVD4FACABSS9", "01HE88YG74PVAB81PX2XA9F3FG"},
|
||||
CreatedAt: TimeMustParse("2023-11-02T12:44:25+02:00"),
|
||||
UpdatedAt: TimeMustParse("2023-11-02T12:44:25+02:00"),
|
||||
Local: util.Ptr(false),
|
||||
AccountURI: "http://example.org/users/Some_User",
|
||||
MentionIDs: []string{"01HE7XQNMKTVC8MNPCE1JGK4J3"},
|
||||
AccountID: "01FHMQX3GAABWSM0S2VZEC2SWC",
|
||||
InReplyToID: "01F8MH75CBF9JFX4ZAD54N0W0R",
|
||||
InReplyToAccountID: "01F8MH17FWEB39HZJ76B6VXSKF",
|
||||
InReplyToURI: "http://localhost:8080/users/admin/statuses/01F8MH75CBF9JFX4ZAD54N0W0R",
|
||||
BoostOfID: "",
|
||||
ContentWarning: "",
|
||||
Visibility: gtsmodel.VisibilityPublic,
|
||||
Sensitive: util.Ptr(false),
|
||||
Language: "en",
|
||||
CreatedWithApplicationID: "",
|
||||
Federated: util.Ptr(true),
|
||||
Boostable: util.Ptr(true),
|
||||
Replyable: util.Ptr(true),
|
||||
Likeable: util.Ptr(true),
|
||||
ActivityStreamsType: ap.ObjectNote,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -2123,6 +2261,18 @@ func NewTestMentions() map[string]*gtsmodel.Mention {
|
|||
TargetAccountURI: "http://localhost:8080/users/the_mighty_zork",
|
||||
TargetAccountURL: "http://localhost:8080/@the_mighty_zork",
|
||||
},
|
||||
"remote_account_2_mention_admin": {
|
||||
ID: "01HE7XQNMKTVC8MNPCE1JGK4J3",
|
||||
StatusID: "01HE7XJ1CG84TBKH5V9XKBVGF5",
|
||||
CreatedAt: TimeMustParse("2023-11-02T12:44:25+02:00"),
|
||||
UpdatedAt: TimeMustParse("2023-11-02T12:44:25+02:00"),
|
||||
OriginAccountID: "01FHMQX3GAABWSM0S2VZEC2SWC",
|
||||
OriginAccountURI: "http://example.org/users/Some_User",
|
||||
TargetAccountID: "01F8MH17FWEB39HZJ76B6VXSKF",
|
||||
NameString: "@admin@localhost:8080",
|
||||
TargetAccountURI: "http://localhost:8080/users/admin",
|
||||
TargetAccountURL: "http://localhost:8080/@admin",
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -339,6 +339,40 @@ main {
|
|||
object-fit: contain;
|
||||
background: $gray1;
|
||||
}
|
||||
|
||||
.unknown-attachment {
|
||||
.placeholder {
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
padding: 0.8rem;
|
||||
border: 0.2rem dashed $white2;
|
||||
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
align-items: center;
|
||||
gap: 0.25rem;
|
||||
|
||||
color: $white2;
|
||||
|
||||
.placeholder-external-link {
|
||||
align-self: end;
|
||||
font-size: 2.5rem;
|
||||
}
|
||||
|
||||
.placeholder-icon {
|
||||
width: 100%;
|
||||
font-size: 3.5rem;
|
||||
text-align: center;
|
||||
margin-top: auto;
|
||||
}
|
||||
|
||||
.placeholder-link-to {
|
||||
width: 100%;
|
||||
text-align: center;
|
||||
margin-bottom: auto;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -53,42 +53,59 @@
|
|||
<div
|
||||
class="media photoswipe-gallery {{(len .) | oddOrEven }}{{if eq (len .) 1}} single{{end}}{{if eq (len .) 2}} double{{end}}">
|
||||
{{range $index, $media := .}}
|
||||
{{with $media}}
|
||||
<div class="media-wrapper">
|
||||
<details class="{{.Type}}-spoiler media-spoiler" {{if not $.Sensitive}}open{{end}}>
|
||||
<summary>
|
||||
<div class="show sensitive button" aria-hidden="true">
|
||||
Show sensitive media
|
||||
</div>
|
||||
<span class="eye button" role="button" tabindex="0" aria-label="Toggle media">
|
||||
<i class="hide fa fa-fw fa-eye-slash" aria-hidden="true"></i>
|
||||
<i class="show fa fa-fw fa-eye" aria-hidden="true"></i>
|
||||
</span>
|
||||
{{with $media}}
|
||||
<div class="media-wrapper">
|
||||
<details class="{{.Type}}-spoiler media-spoiler" {{if not $.Sensitive}}open{{end}}>
|
||||
<summary>
|
||||
<div class="show sensitive button" aria-hidden="true">
|
||||
Show sensitive media
|
||||
</div>
|
||||
<span class="eye button" role="button" tabindex="0" aria-label="Toggle media">
|
||||
<i class="hide fa fa-fw fa-eye-slash" aria-hidden="true"></i>
|
||||
<i class="show fa fa-fw fa-eye" aria-hidden="true"></i>
|
||||
</span>
|
||||
|
||||
{{if eq .Type "video"}}
|
||||
<video {{if .Description}} title="{{.Description}}" {{end}}>
|
||||
<source type="video/mp4" src="{{.URL}}" />
|
||||
</video>
|
||||
{{else}}
|
||||
<img {{if .Description}} title="{{.Description}}" {{end}} src="{{.PreviewURL}}" />
|
||||
{{end}}
|
||||
</summary>
|
||||
{{if eq .Type "video"}}
|
||||
<video class="plyr-video photoswipe-slide" controls {{if .Description}}alt="{{.Description}}"
|
||||
title="{{.Description}}" {{end}} data-pswp-index="{{$index}}" data-pswp-width="{{.Meta.Original.Width}}px"
|
||||
data-pswp-height="{{.Meta.Original.Height}}px">
|
||||
<source type="video/mp4" src="{{.URL}}" />
|
||||
</video>
|
||||
{{else}}
|
||||
<a class="photoswipe-slide" href="{{.URL}}" target="_blank" {{if .Description}}title="{{.Description}}" {{end}}
|
||||
data-pswp-width="{{.Meta.Original.Width}}px" data-pswp-height="{{.Meta.Original.Height}}px"
|
||||
data-cropped="true">
|
||||
<img src="{{.PreviewURL}}" {{if .Description}}alt="{{.Description}}" {{end}} />
|
||||
</a>
|
||||
{{end}}
|
||||
</details>
|
||||
</div>
|
||||
{{end}}
|
||||
{{if eq .Type "video"}}
|
||||
<video {{if .Description}} title="{{.Description}}" {{end}}>
|
||||
<source type="video/mp4" src="{{.URL}}" />
|
||||
</video>
|
||||
{{else if eq .Type "image"}}
|
||||
<img {{if .Description}} title="{{.Description}}" {{end}} src="{{.PreviewURL}}" />
|
||||
{{end}}
|
||||
</summary>
|
||||
{{if eq .Type "video"}}
|
||||
<video class="plyr-video photoswipe-slide" controls {{if .Description}}alt="{{.Description}}"
|
||||
title="{{.Description}}" {{end}} data-pswp-index="{{$index}}" data-pswp-width="{{.Meta.Original.Width}}px"
|
||||
data-pswp-height="{{.Meta.Original.Height}}px">
|
||||
<source type="video/mp4" src="{{.URL}}" />
|
||||
</video>
|
||||
{{else if eq .Type "image"}}
|
||||
<a class="photoswipe-slide" href="{{.URL}}" target="_blank" {{if .Description}}title="{{.Description}}" {{end}}
|
||||
data-pswp-width="{{.Meta.Original.Width}}px" data-pswp-height="{{.Meta.Original.Height}}px"
|
||||
data-cropped="true">
|
||||
<img src="{{.PreviewURL}}" {{if .Description}}alt="{{.Description}}" {{end}} />
|
||||
</a>
|
||||
{{else}}
|
||||
<a
|
||||
class="unknown-attachment"
|
||||
href="{{.RemoteURL}}"
|
||||
target="_blank"
|
||||
{{if .Description}}
|
||||
title="Link to external media: {{.Description}} {{.RemoteURL}}"
|
||||
{{else}}
|
||||
title="Link to external media. {{.RemoteURL}}"
|
||||
{{end}}
|
||||
>
|
||||
<div class="placeholder" aria-hidden="true">
|
||||
<i class="placeholder-external-link fa fa-external-link"></i>
|
||||
<i class="placeholder-icon fa fa-file-text"></i>
|
||||
<div class="placeholder-link-to">External media</div>
|
||||
</div>
|
||||
</a>
|
||||
{{end}}
|
||||
</details>
|
||||
</div>
|
||||
{{end}}
|
||||
{{end}}
|
||||
</div>
|
||||
{{end}}
|
||||
|
|