Last Stage #MG6: Clone Implementation in JavaScript

I’m stuck on Stage 7

I’ve tried a lot of trial and error, but my checksum doesn’t match and my objects aren’t getting zlib-decompressed (the error I get is EOF).

Here are my logs:

001e# service=git-upload-pack
0000015376f7b3d46ad1752d843246cc81907aba55a84127 HEADmulti_ack thin-pack side-band side-band-64k ofs-delta shallow deepen-since deepen-not deepen-relative no-progress include-tag multi_ack_detailed allow-tip-sha1-in-want allow-reachable-sha1-in-want no-done symref=HEAD:refs/heads/main filter object-format=sha1 agent=git/github-f133c3a1d7e6 
003d76f7b3d46ad1752d843246cc81907aba55a84127 refs/heads/main
0000
[
  '001e# service=git-upload-pack',
  '0000015376f7b3d46ad1752d843246cc81907aba55a84127 HEAD\x00multi_ack thin-pack side-band side-band-64k ofs-delta shallow deepen-since deepen-not deepen-relative no-progress include-tag multi_ack_detailed allow-tip-sha1-in-want allow-reachable-sha1-in-want no-done symref=HEAD:refs/heads/main filter object-format=sha1 agent=git/github-f133c3a1d7e6',
  '003d76f7b3d46ad1752d843246cc81907aba55a84127 refs/heads/main',
  '0000'
]
76f7b3d46ad1752d843246cc81907aba55a84127
[
  <Buffer 30 30 30 38 4e 41 4b 0a 50 41 43 4b 00 00 00 02 00 00 00 12 97 0d 78 9c ad 8c 41 0a c2 30 10 00 ef 79 45 ee a2 24 dd 8d e9 82 88 50 10 bf b1 26 ab 09 ... 883 more bytes>,
  <Buffer 23 14 e8 c8 db aa 0d d7 13 3e 5c 1b a2 8f 09 03 ad af f6 f0 23 46 44 14 22 f0 61 70 16 c9 23 c6 99 b2 30 0f a8 37 de 00 55 c3 7d 25 8e 29 79 61 0c 42 ... 1320 more bytes>,
  <Buffer fb cb d9 d9 f4 dd c9 0c 66 c0 9d c9 0b f2 cf ab 88 b3 25 38 2a 4b dc cd a6 31 b8 a8 62 33 46 08 b3 4f e6 d6 9c 28 2f 20 5f b8 58 11 93 c4 2c ba 65 11 ... 1320 more bytes>,
  <Buffer 13 2e 02 04 3a a2 36 44 11 f4 61 b5 aa c6 31 fa 8d 9c cb 46 f2 35 1f 8f b1 f6 f1 ef 8c 4e f7 35 e4 7d 89 68 35 35 0f ca 00 d3 68 94 5c 86 d4 f3 8c ad ... 1320 more bytes>,
  <Buffer e6 5a 88 02 df fc 94 d4 1c 86 6f 76 27 33 23 84 95 22 73 d8 7b 96 df 5f cb 93 f7 96 43 a5 1b 00 74 c3 29 ef b2 77 78 9c 8d 95 cd 52 83 30 14 85 f7 7d ... 1320 more bytes>,
  <Buffer 08 0c 34 aa 39 2a 58 29 02 cc 94 19 fa 2e 23 4d 4b 84 b9 88 30 85 58 59 40 30 29 f9 f0 44 10 dd ad c0 b2 cb 13 a6 35 5c db 3d 23 43 a0 b3 91 03 3c c3 ... 389 more bytes>,
  <Buffer 96 53 59 f1 03 7b 41 38 ee 85 4f cc 2a a1 74 d9 91 ab 8a 0f f0 92 a6 07 52 d1 da 84 ea bb f1 f5 c4 09 e5 41 5e c2 84 2a 32 79 29 13 0a fe 72 44 ad 76 ... 1314 more bytes>,
  <Buffer 87 b5 86 ca 5f 45 bb 1c 67 6f 0d 2c a7 ee 82 7b 29 2a e9 dc f8 41 e0 8f b9 1b 85 9e 00 e7 e8 e0 10 6f 6a 95 80 31 32 32 ac ab ba 74 25 ef 35 db 22 3c ... 1320 more bytes>,
  <Buffer ab 33 23 ee 9d 08 c1 2f 2d b5 d5 97 97 8c 73 8d c6 5c 1a ac 66 39 f2 71 ce ca 32 63 f9 67 07 38 aa bc 9b ff 9e 06 a5 eb e5 e3 72 0d ef 92 44 b2 0a 4d ... 1320 more bytes>,
  <Buffer d4 07 1d c1 da dc 93 df 4f 7e 7f 43 bb 6c f7 2e 43 23 d4 ec 1d 6c b4 a1 67 94 46 c7 27 73 71 de 68 f2 ad f3 fd 5e 1a 32 e5 ff 66 d9 cd ae 9d ef 98 d2 ... 1320 more bytes>,
  <Buffer 67 06 6e dc 4e 10 89 b1 35 de a0 5b 23 c0 22 03 ac 90 28 43 ef ff 09 e7 0f 0d e5 e6 30 de 15 13 05 4a 07 db c0 ba 7a d4 da 01 07 f3 89 a8 7c 6f f2 29 ... 1320 more bytes>,
  <Buffer 5e b7 a1 c6 25 34 0c b9 b0 cb 42 be db 14 4e f0 c9 2c 64 e0 64 5a 93 36 28 42 83 b6 50 e4 95 31 5f 1b f1 15 a7 15 ce 85 4c 9f 16 5a 9d 15 3c 29 3e 77 ... 158 more bytes>
]
47648119b5c822702a7b29410f71330cec06eb70
26bc0e7073a3a68e8271642e19fb86e9139b0d34
D:\GIT_Crafter\codecrafters-git-javascript\app\main.js:490
    throw new Error('Checksum does not match');
    ^

Error: Checksum does not match
    at saveObjects (D:\GIT_Crafter\codecrafters-git-javascript\app\main.js:490:11)
    at IncomingMessage.<anonymous> (D:\GIT_Crafter\codecrafters-git-javascript\app\main.js:462:11)
    at IncomingMessage.emit (node:events:532:35)
    at endReadableNT (node:internal/streams/readable:1696:12)
    at process.processTicksAndRejections (node:internal/process/task_queues:82:21)

Node.js v22.2.0

And here’s a snippet of my code:


function clone_repo(repo_url, target_dir){
  fs.mkdirSync(path.join(__dirname,target_dir));
  fs.mkdirSync(path.join(__dirname,target_dir, ".git"), { recursive: true });
  fs.mkdirSync(path.join(__dirname,target_dir, ".git", "objects"), { recursive: true });
  fs.mkdirSync(path.join(__dirname,target_dir, ".git", "refs"), { recursive: true });

  fs.writeFileSync(
    path.join(__dirname,target_dir, ".git", "HEAD"),
    "ref: refs/heads/main\n"
  );

  const repo_path = path.join(__dirname, target_dir);

  //Network Request to Fetch Repository Information

  https.get(`${repo_url}/info/refs?service=git-upload-pack`,(resp)=>{
    let content='';

    resp.on('data',(chunk)=>{
      content+=chunk;
      // console.log(chunk)
    });

   


    

    resp.on('end',()=>{
      console.log(content);
      const respAsArr= content.split('\n');
      let packHash;
      console.log(respAsArr)

      for(let c of respAsArr){
        if(c.includes('refs/heads/main') && c.includes('003')){
          const tup= c.split(" ");
          packHash = tup[0].slice(4); // Remove the leading 4-char pkt-line length prefix (003d here)
        }
      }

      console.log(packHash)

     // Perform POST request to git-upload-pack endpoint
      fetchPack(repo_url, packHash, repo_path);
    });
  });
}
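// For reference: parsing the ref advertisement by splitting on '\n' and looking for a
// '003' prefix is fragile, because pkt-lines are length-prefixed rather than
// newline-delimited. A minimal sketch of a pkt-line parser (one possible approach, and
// the helper name is just illustrative): each line starts with a 4-char hex length that
// includes the length digits themselves, and "0000" is a flush packet with no payload.
function parsePktLines(raw) {
  const lines = [];
  let pos = 0;
  while (pos < raw.length) {
    const len = parseInt(raw.slice(pos, pos + 4), 16);
    if (len === 0) {       // flush-pkt "0000"
      pos += 4;
      continue;
    }
    lines.push(raw.slice(pos + 4, pos + len)); // payload without the 4-char length prefix
    pos += len;
  }
  return lines;
}
// Each ref line then looks like "<40-char sha> <ref name>", so the hash for
// refs/heads/main is simply line.slice(0, 40).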



function fetchPack(repoUrl, packHash, repoDir) {
  // Construct POST request options
  const postData = Buffer.from(`0032want ${packHash}\n00000009done\n`, 'utf-8');
  const options = {
      method: 'POST',
      headers: {
          'Content-Type': 'application/x-git-upload-pack-request'
      }
  };

  // Perform POST request to git-upload-pack endpoint
  const req = https.request(`${repoUrl}/git-upload-pack`, options, (res) => {
      let packData = [];

      res.on('data', (chunk) => {
          packData.push(chunk);
      });

      res.on('end', () => {
          // Parse pack data and save objects to local repository
          console.log(packData)
          const packFileData= Buffer.concat(packData)
          saveObjects(packFileData, repoDir);
      });

      res.on('error', (error) => {
          console.error('Error fetching pack data:', error);
      });
  });

  req.on('error', (error) => {
      console.error('Error connecting to git-upload-pack endpoint:', error);
  });

  // Write POST data to request
  req.write(postData);
  req.end();
}
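// Note: with the capabilities requested above (no side-band), the response body is a
// "0008NAK\n" pkt-line followed by the raw packfile -- which matches the first chunk in
// the logs ("0008NAK\nPACK..."). The bytes before "PACK" are protocol framing, not pack
// data. A rough sketch of locating the real pack start (helper name is illustrative):
function extractPack(responseBuffer) {
  const packStart = responseBuffer.indexOf('PACK');
  if (packStart === -1) {
    throw new Error('No PACK signature found in upload-pack response');
  }
  return responseBuffer.subarray(packStart); // 12-byte header + objects + 20-byte trailer
}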


function saveObjects(packRespData,repoDir){
  // Read the 12-byte packfile header

  // console.log(packRespData.slice(0, 12).toString('ascii'))
  const ddd = packRespData.slice(0, packRespData.length - 20);
  const checksum_calc = crypto.createHash("sha1").update(ddd).digest('hex');
  const checksum = packRespData.slice(packRespData.length - 20).toString('hex');
  console.log(checksum_calc)
  console.log(checksum)
  if (checksum_calc !== checksum) {
    throw new Error('Checksum does not match');
  }

  if (packRespData.slice(8, 12).toString('ascii') !== 'PACK') {
    throw new Error('Invalid packfile format');
  }

  const magic = packRespData.toString('utf-8', 8, 12);

  // Read the version and number of objects from the packfile header
  const versionBuffer = packRespData.slice(12, 16); // Bytes 12-15 (4 bytes)
  const version = versionBuffer.readUInt32BE(0); // Interpret them as a big-endian 32-bit unsigned integer
  const temp = packRespData.slice(16, 20);
  const numObjects = temp.readUInt32BE(0); // Read the next 4 bytes as a big-endian unsigned integer

  console.log(`Packfile: ${magic}, Version: ${version}, Number of Objects: ${numObjects}`);

  let offset= 20;

  for(let i=0;i<numObjects;i++){

    const { object, nextOffset } = readObject(packRespData, offset);
    console.log(`Object ${i + 1}: Type = ${object.type}, Size = ${object.data_size}`);
    offset = nextOffset; // Move to the start of the next object
  }

}
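// The 20-byte trailer is a SHA-1 over everything from the "PACK" signature up to (but
// not including) the trailer itself. If the hash is computed over the whole HTTP body
// -- including the "0008NAK\n" prefix visible in the logs -- the two values can't match.
// A sketch of the check, assuming `pack` already starts at "PACK" (see extractPack above):
function verifyPackChecksum(pack) {
  const expected = pack.subarray(pack.length - 20).toString('hex');
  const actual = crypto
    .createHash('sha1')
    .update(pack.subarray(0, pack.length - 20))
    .digest('hex');
  if (actual !== expected) {
    throw new Error(`Checksum does not match: ${actual} !== ${expected}`);
  }
}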

function readObject(buffer,start){
  const typeAndSize = decodeVarInt(buffer, start);
  

  // First we will shift right by 4 bits
  const type = typeAndSize.type  // Top three bits of the first byte
  const size = typeAndSize.value; // Remaining bits are size

  console.log(type,size,typeAndSize.bytesUsed)

  if (start + typeAndSize.bytesUsed + size > buffer.length) {
    console.error("Calculated size exceeds buffer length.");
  }

  // const compressedData = buffer.slice(start + typeAndSize.bytesUsed, start + typeAndSize.bytesUsed + size);
  // console.log(compressedData)
    
  // try {
    let decompressedData = zlib.inflateSync(buffer);
    decompressedData = decompressedData.slice(start + typeAndSize.bytesUsed, start + typeAndSize.bytesUsed + size);
    // console.log("hii")
    
    // Further processing...
// } catch (err) {
//     console.error("Decompression failed:", err);
//     // Handle error or cleanup
// }

  return {
      object: {
          type: type,
          data_size: decompressedData.length,
      },
      nextOffset: start + typeAndSize.bytesUsed + size
  };

}
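// Each object's zlib stream starts immediately after its type/size header, so calling
// zlib.inflateSync on the whole response (which begins with "0008NAK\nPACK...") cannot
// succeed, and the size in the header is the *uncompressed* size, so it can't be used to
// slice the compressed bytes either. A sketch of inflating a single object, assuming
// inflateSync tolerates the trailing bytes that belong to the following objects:
function inflateObjectAt(pack, dataStart, expectedSize) {
  const data = zlib.inflateSync(pack.subarray(dataStart));
  if (data.length !== expectedSize) {
    throw new Error(`Expected ${expectedSize} inflated bytes, got ${data.length}`);
  }
  // Caveat: advancing to the next object still needs the *compressed* length, which
  // inflateSync alone does not report; that part is not solved by this sketch.
  return data;
}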




// Decode a variable-length integer from a byte stream.
// Variable-length integers pack their least significant bits first; each subsequent byte contributes increasingly significant bits.

function decodeVarInt(buffer, offset) {
  let value = 0;    // This will accumulate the integer value.
  let shift = 0;    // This tracks the "shift" for bits as we move through each byte.
  let bytesUsed = 0;  // This counts the number of bytes used to store the integer.
  let byte;        // This will store the current byte's value.
  let first=0;

  do {
      byte = buffer[offset + bytesUsed];  // Read the current byte at the position.

      //7 bits are part of the integer's value, while the 8th bit is only a flag for continuation.

      if (first === 0) {
        value |= (byte & 0b00001111) << shift;
        first = 1;
      } else {
        value |= (byte & 0x7f) << shift;   // Mask off the top bit and shift the remaining bits into position, then OR it with the accumulated value. 0x7f or 0b01111111 is 127
      }

      shift += 7;  // Increase the shift for the next byte's bits.
      bytesUsed++; // Increment the count of bytes used.
    } while (byte >= 128); // Continue while the continuation bit is set (byte >= 128).

    const type = (buffer[offset] >> 4) & 0b0111;  // Extract the type from the first byte

  return {
    value: value,  //Value: decoded integer
    type: type,
    bytesUsed: bytesUsed // No. of bytes used to decode the integer
    };

}
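For comparison, the pack object header puts the low 4 bits of the size in the first byte (next to the 3 type bits), and each continuation byte adds 7 more significant bits starting at shift 4 rather than 7. A minimal sketch of that decoding (names are illustrative, not from the challenge):

function readObjectHeader(buffer, offset) {
  let byte = buffer[offset];
  const type = (byte >> 4) & 0b0111; // 1=commit, 2=tree, 3=blob, 4=tag, 6=ofs-delta, 7=ref-delta
  let size = byte & 0b1111;          // low 4 bits of the uncompressed size
  let shift = 4;                     // the next byte's bits are more significant than these 4
  let bytesUsed = 1;
  while (byte & 0b10000000) {        // MSB set => another size byte follows
    byte = buffer[offset + bytesUsed];
    size |= (byte & 0b01111111) << shift;
    shift += 7;
    bytesUsed++;
  }
  return { type, size, bytesUsed };
}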


This is a tricky stage to debug :slight_smile: Looks like there have been no JS completions yet, so there are no code examples to look at either :confused:

We’re planning on splitting this into multiple stages at some point; hopefully that’ll make it more tractable.


Note: I’ve updated the title of this post to include the stage ID (#MG6). You can learn about the stages rename here: Upcoming change: Stages overhaul.

Going to close this out for now; when we split this into multiple stages, we’ll have a better sense of what the failures are per stage.

This topic was automatically closed 5 days after the last reply. New replies are no longer allowed.