@@ -221,12 +221,6 @@ func (c *skillComponentImpl) handleSkillPackage(ctx context.Context, sha256 stri
221221 obj := object
222222 defer obj .Close ()
223223
224- // Get object info to get size for zip reader
225- objectInfo , err := c .s3Client .StatObject (ctx , c .config .S3 .Bucket , objectKey , minio.StatObjectOptions {})
226- if err != nil {
227- return nil , fmt .Errorf ("failed to get object info: %w" , err )
228- }
229-
230224 // Create a buffered reader to detect file format
231225 bufReader := bufio .NewReader (object )
232226 // Read first 8 bytes to detect file format
@@ -239,15 +233,14 @@ func (c *skillComponentImpl) handleSkillPackage(ctx context.Context, sha256 stri
239233 // Decompress based on content detection (since objectKey has no extension)
240234 // Try to detect format from content
241235 if bytes .HasPrefix (magicBytes , []byte {0x50 , 0x4B , 0x03 , 0x04 }) {
242- // ZIP format - use S3 Range + ReaderAt for streaming decompression
243- s3ReaderAt := & s3ReaderAt {
244- ctx : ctx ,
245- client : c .s3Client ,
246- bucket : c .config .S3 .Bucket ,
247- key : objectKey ,
248- size : objectInfo .Size ,
249- }
250- return decompressZip (s3ReaderAt , objectInfo .Size )
236+ // ZIP format - read entire file into memory
237+ // Reset reader to start (including the magic bytes we already read)
238+ r := io .MultiReader (bytes .NewReader (magicBytes ), bufReader )
239+ zipContent , err := io .ReadAll (r )
240+ if err != nil {
241+ return nil , fmt .Errorf ("failed to read zip file: %w" , err )
242+ }
243+ return decompressZip (bytes .NewReader (zipContent ), int64 (len (zipContent )))
251244 } else if bytes .HasPrefix (magicBytes , []byte {0x1F , 0x8B , 0x08 }) {
252245 // GZIP format (tar.gz) - use streaming decompression
253246 // Reset reader to start (including the magic bytes we already read)
@@ -760,41 +753,6 @@ const (
760753 MaxIndividualFileSize = 50 * 1024 * 1024
761754)
762755
763- // s3ReaderAt implements io.ReaderAt using S3 Range requests
764- type s3ReaderAt struct {
765- ctx context.Context
766- client s3.Client
767- bucket string
768- key string
769- size int64
770- }
771-
772- // ReadAt reads data from the S3 object starting at the given offset
773- func (r * s3ReaderAt ) ReadAt (p []byte , off int64 ) (n int , err error ) {
774- // Calculate end offset
775- end := off + int64 (len (p )) - 1
776- if end >= r .size {
777- end = r .size - 1
778- }
779-
780- // Create range option
781- opts := minio.GetObjectOptions {}
782- err = opts .SetRange (off , end )
783- if err != nil {
784- return 0 , err
785- }
786-
787- // Get object with range
788- object , err := r .client .GetObject (r .ctx , r .bucket , r .key , opts )
789- if err != nil {
790- return 0 , err
791- }
792- defer object .Close ()
793-
794- // Read data
795- return io .ReadFull (object , p )
796- }
797-
798756// decompressZip decompresses a zip file and returns a list of CommitFile objects
799757func decompressZip (reader io.ReaderAt , size int64 ) ([]types.CommitFile , error ) {
800758 zipReader , err := zip .NewReader (reader , size )
@@ -810,6 +768,11 @@ func decompressZip(reader io.ReaderAt, size int64) ([]types.CommitFile, error) {
810768 continue
811769 }
812770
771+ // Skip .git directory and files
772+ if strings .Contains (file .Name , "/.git/" ) || strings .HasPrefix (file .Name , ".git/" ) || file .Name == ".git" {
773+ continue
774+ }
775+
813776 // Check individual file size
814777 if file .UncompressedSize64 > uint64 (MaxIndividualFileSize ) {
815778 return nil , fmt .Errorf ("file too large: %s (size: %d bytes, max: %d bytes)" , file .Name , file .UncompressedSize64 , MaxIndividualFileSize )
@@ -873,6 +836,11 @@ func decompressTarGz(reader io.Reader) ([]types.CommitFile, error) {
873836 continue
874837 }
875838
839+ // Skip .git directory and files
840+ if strings .Contains (header .Name , "/.git/" ) || strings .HasPrefix (header .Name , ".git/" ) || header .Name == ".git" {
841+ continue
842+ }
843+
876844 // Check individual file size
877845 if header .Size > MaxIndividualFileSize {
878846 return nil , fmt .Errorf ("file too large: %s (size: %d bytes, max: %d bytes)" , header .Name , header .Size , MaxIndividualFileSize )