ssdeep.pm 19 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729
package Digest::ssdeep;

use warnings;
use strict;
use Carp;
use Text::WagnerFischer qw/distance/;    # weighted edit distance for matching
use List::Util qw/max/;
use version;
our $VERSION = qv('0.9.3');

BEGIN {
    require Exporter;
    # NOTE(review): "use vars" is obsolete; "our @ISA; our @EXPORT_OK;"
    # would be the modern equivalent.
    use vars qw(@ISA @EXPORT_OK);
    @ISA       = qw(Exporter);
    @EXPORT_OK = qw(
      ssdeep_hash
      ssdeep_hash_file
      ssdeep_compare
      ssdeep_dump_last
    );
}

# 32-bit FNV multiplication prime.
use constant FNV_PRIME => 0x01000193;
# Initial FNV state; non-standard offset basis, presumably matching
# ssdeep/spamsum's HASH_INIT -- TODO confirm against the C source.
use constant FNV_INIT => 0x28021967;
# Maximum number of characters in each part of the fuzzy-hash signature.
use constant MAX_LENGTH => 64;

# Edit-distance weights for Text::WagnerFischer:
#   same = 0
#   insertion/deletion = 1
#   mismatch = 2
#   swap = N/A (should be 5)
$Text::WagnerFischer::REFC = [ 0, 1, 2 ];

# Base-64 alphabet used to map the low 6 bits of an FNV value to a character.
my @b64 = split '',
  "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";

# Trace of the last hash calculation, dumped by ssdeep_dump_last().
my @DEBUG_LAST;
  32. my @last7chars; # will use character 7 places before
  33. { # begin rolling hash internals
  34. my $roll_h1; # rolling hash internal
  35. my $roll_h2; # rolling hash internal
  36. my $roll_h3; # rolling hash internal
  37. # Resets the roll hash internal status
  38. sub _reset_rollhash {
  39. @last7chars =
  40. ( 0, 0, 0, 0, 0, 0, 0 ); # will use character 7 places before
  41. $roll_h1 = 0;
  42. $roll_h2 = 0;
  43. $roll_h3 = 0;
  44. }
  45. # Updates rolling_hash's internal state and return the rolling_hash value.
  46. # Parameters: the next character.
  47. # Returns: the actual rolling hash value
  48. sub _roll_hash {
  49. my $char = shift;
  50. my $char7bf = shift @last7chars;
  51. push @last7chars, $char;
  52. $roll_h2 += 7 * $char - $roll_h1;
  53. $roll_h1 += $char - $char7bf;
  54. $roll_h3 <<= 5; # 5*7 = 35 (so it vanish after 7 iterations)
  55. $roll_h3 &= 0xffffffff;
  56. $roll_h3 ^= $char;
  57. #printf("c=%d cAnt=%d H1=%u H2=%u H3=%u\n",
  58. # $char, $char7bf,
  59. # $roll_h1, $roll_h2, $roll_h3);
  60. return $roll_h1 + $roll_h2 + $roll_h3;
  61. }
  62. } # end rolling hash internals
# In-place updates the FNV ("traditional") hash with one new byte:
#   _update_fnv($fnvhash, $newchar);
# $_[0] aliases the caller's variable, so the hash is modified in place.
# NOTE(review): this multiplies before XORing (FNV-1 ordering) -- it matches
# spamsum/ssdeep's sum_hash, although the POD calls it FNV-1a.
sub _update_fnv {
    use integer; # we need integer overflow in multiplication
    $_[0] *= FNV_PRIME;
    $_[0] &= 0xffffffff;    # truncate back to 32 bits (perl IVs are wider)
    $_[0] ^= $_[1];
    no integer;
}
  72. # Calculates initial blocksize
  73. # Parameter: the length of the whole data
  74. sub _calc_initbs {
  75. my $length = shift;
  76. # MAX_LENGTH * bs < length
  77. # MAX_LENGTH * 3 * 2 * 2 * 2 * ... < length
  78. #my $n = int(log($length / (MAX_LENGTH * 3)) / log(2));
  79. #my $bs = 3 * 2**$n;
  80. my $bs = 3;
  81. $bs *= 2 while ( $bs * MAX_LENGTH < $length );
  82. return $bs > 3 ? $bs : 3;
  83. }
# Calculates the ssdeep fuzzy hash of a string.
# Parameters: the string to hash
# Returns: in scalar context "blocksize:hash1:hash2";
#          in list context ( $blocksize, $hash1, $hash2 ).
#          Returns nothing if the argument is undefined.
sub ssdeep_hash {
    my $string = shift;
    return unless defined $string;

    # Initial blocksize: smallest 3*2**n whose signature fits in MAX_LENGTH.
    my $bs = _calc_initbs( length $string );

    @DEBUG_LAST = ();    # reset the trace consumed by ssdeep_dump_last()

    my $hash1;           # signature at blocksize $bs
    my $hash2;           # signature at blocksize 2*$bs

    # Each pass hashes the whole string at the current blocksize.  If the
    # rolling hash triggered too few times (hash1 shorter than MAX_LENGTH/2),
    # retry with half the blocksize to get more chunks.
    while (1) {
        _reset_rollhash();
        my $fnv1 = FNV_INIT; # traditional hash blocksize
        my $fnv2 = FNV_INIT; # traditional hash 2*blocksize

        $hash1 = '';
        $hash2 = '';

        for my $i ( 0 .. length($string) - 1 ) {
            my $c = ord( substr( $string, $i, 1 ) );

            my $h = _roll_hash($c);
            _update_fnv( $fnv1, $c ); # blocksize FNV hash
            _update_fnv( $fnv2, $c ); # 2* blocksize FNV hash

            # Context trigger for part 1: end the current chunk and emit one
            # base-64 character from the 6 low bits of its FNV hash.  The
            # length guard keeps the last slot free for the final chunk.
            if ( $h % $bs == ( $bs - 1 ) and length $hash1 < MAX_LENGTH - 1 ) {
                my $b64char = $b64[ $fnv1 & 63 ];
                $hash1 .= $b64char;
                push @DEBUG_LAST,
                  [ 1, $i + 1, join( '|', @last7chars ), $fnv1, $b64char ];
                $fnv1 = FNV_INIT;    # start a fresh chunk
            }

            # Same trigger at double blocksize for part 2 (max 32 chars).
            if ( $h % ( 2 * $bs ) == ( 2 * $bs - 1 )
                and length $hash2 < MAX_LENGTH / 2 - 1 )
            {
                my $b64char = $b64[ $fnv2 & 63 ];
                $hash2 .= $b64char;
                push @DEBUG_LAST,
                  [ 2, $i + 1, join( '|', @last7chars ), $fnv2, $b64char ];
                $fnv2 = FNV_INIT;    # start a fresh chunk
            }
        }

        # Always emit a character for the last (possibly partial) chunk.
        $hash1 .= $b64[ $fnv1 & 63 ];
        $hash2 .= $b64[ $fnv2 & 63 ];
        push @DEBUG_LAST,
          [
            1, length($string),
            join( '|', @last7chars ), $fnv1,
            $b64[ $fnv1 & 63 ]
          ];
        push @DEBUG_LAST,
          [
            2, length($string),
            join( '|', @last7chars ), $fnv2,
            $b64[ $fnv2 & 63 ]
          ];

        # Stop at the minimum blocksize, or once we have enough triggers.
        last if $bs <= 3 or length $hash1 >= MAX_LENGTH / 2;

        $bs = int( $bs / 2 ); # repeat with half blocksize if no enough triggers
        $bs > 3 or $bs = 3;   # never drop below the minimum blocksize
    }

    my @outarray = ( $bs, $hash1, $hash2 );
    return wantarray ? @outarray : join ':', @outarray;
}
  146. # Convenient function. Slurps file. You should not use it for long files.
  147. # You should not use pure perl implementation for long files anyway.
  148. # Parameter: filename
  149. # Returns: ssdeep hash in string or array format
  150. sub ssdeep_hash_file {
  151. my $file = shift;
  152. # Slurp the file (we can also use File::Slurp
  153. local ($/);
  154. open( my $fh, '<', $file ) or return;
  155. my $string = <$fh>;
  156. close $fh;
  157. return ssdeep_hash($string);
  158. }
  159. # Determines the longest common substring
  160. sub _lcss {
  161. my $strings = join "\0", @_;
  162. my $lcs = '';
  163. for my $n ( 1 .. length $strings ) {
  164. my $re = "(.{$n})" . '.*\0.*\1' x ( @_ - 1 );
  165. last unless $strings =~ $re;
  166. $lcs = $1;
  167. }
  168. return $lcs;
  169. }
  170. # Calculates how similar two strings are using the Wagner-Fischer package.
  171. # Parameters: min_lcs, string A, string B
  172. # Returns: the likeliness being 0 totally dissimilar and 100 same string
  173. # Returns 0 also if the longest common substring is shorter than min_lcs
  174. sub _likeliness {
  175. my ( $min_lcs, $a, $b ) = @_;
  176. return 0 unless length( _lcss( $a, $b ) ) >= $min_lcs;
  177. my $dist = distance( $a, $b );
  178. #$DB::single = 2;
  179. # Must follow ssdeep original's code for compatibility
  180. # $dist = 100 * $dist / (length($a) + length($b));
  181. $dist = int( $dist * MAX_LENGTH / ( length($a) + length($b) ) );
  182. $dist = int( 100 * $dist / 64 );
  183. $dist > 100 and $dist = 100;
  184. return 100 - $dist;
  185. }
  186. # We accept hash in both array and scalar format
  187. # Parameters: $hashA, $hashB, [$min_lcs]
  188. # Parameters: \@hashA, \@hashB, [$min_lcs]
  189. # Returns: file matching in %
  190. sub ssdeep_compare {
  191. my @hashA; # hash = bs:hash1:hash2
  192. my @hashB; # hash = bs:hash1:hash2
  193. @hashA = ref( $_[0] ) eq 'ARRAY' ? @{ $_[0] } : split ':', $_[0];
  194. @hashB = ref( $_[1] ) eq 'ARRAY' ? @{ $_[1] } : split ':', $_[1];
  195. my $min_lcs = $_[2] || 7;
  196. if ( @hashA != 3 or $hashA[0] !~ /\d+/ ) {
  197. carp "Argument 1 is not a ssdeep hash.";
  198. return;
  199. }
  200. if ( @hashB != 3 or $hashB[0] !~ /\d+/ ) {
  201. carp "Argument 2 is not a ssdeep hash.";
  202. return;
  203. }
  204. # Remove sequences of more than three repeated character
  205. s/(.)\1{3,}/$1/gi for @hashA;
  206. s/(.)\1{3,}/$1/gi for @hashB;
  207. # Remove trailing newlines
  208. s/\s+$//gi for @hashA;
  209. s/\s+$//gi for @hashB;
  210. #$DB::single = 2;
  211. my $like;
  212. # Blocksize comparison
  213. # bsA:hash_bsA:hash_2*bsA
  214. # bsB:hash_bsB:hash_2*bsB
  215. if ( $hashA[0] == $hashB[0] ) {
  216. # Compare both hashes
  217. my $like1 = _likeliness( $min_lcs, $hashA[1], $hashB[1] );
  218. my $like2 = _likeliness( $min_lcs, $hashA[2], $hashB[2] );
  219. $like = max( $like1, $like2 );
  220. }
  221. elsif ( $hashA[0] == 2 * $hashB[0] ) {
  222. # Compare hash_bsA with hash_2*bsB
  223. $like = _likeliness( $min_lcs, $hashA[1], $hashB[2] );
  224. }
  225. elsif ( 2 * $hashA[0] == $hashB[0] ) {
  226. # Compare hash_2*bsA with hash_bsB
  227. $like = _likeliness( $min_lcs, $hashA[2], $hashB[1] );
  228. }
  229. else {
  230. # Nothing suitable to compare, sorry
  231. return 0;
  232. }
  233. return $like;
  234. }
  235. # Dump internals information. See help.
  236. sub ssdeep_dump_last {
  237. my @result;
  238. for (@DEBUG_LAST) {
  239. push @result, join ",", @{$_};
  240. }
  241. return @result;
  242. }
  243. 1; # Magic true value required at end of module
  244. __END__
  245. =head1 NAME
  246. Digest::ssdeep - Pure Perl ssdeep (CTPH) fuzzy hashing
  247. =head1 VERSION
This document describes Digest::ssdeep version 0.9.3
  249. =head1 SYNOPSIS
  250. use Digest::ssdeep qw/ssdeep_hash ssdeep_hash_file/;
  251. $hash = ssdeep_hash( $string );
  252. # or in array context:
  253. @hash = ssdeep_hash( $string );
  254. $hash = ssdeep_hash_file( "data.txt" );
  255. @details = ssdeep_dump_last();
  256. use Digest::ssdeep qw/ssdeep_compare/;
  257. $match = ssdeep_compare( $hashA, $hashB );
  258. $match = ssdeep_compare( \@hashA, \@hashB );
  259. =head1 DESCRIPTION
  260. This module provides simple implementation of ssdeep fuzzy hashing also known as Context Triggered Piecewise
  261. Hashing (CTPH).
  262. =head2 Fuzzy hashing algorithm
  263. Please, refer to Jesse Kornblum's paper for a detailed discussion (L<SEE ALSO>).
  264. To calculate the CTPH we should choose a maximum signature length. Then divide
  265. the file in as many chunks as this length. Calculate a hash or checksum for
  266. each chunk and map it to a character. The fuzzy hashing is the concatenation of
  267. all the characters.
  268. We cannot use fixed length blocks to separate the file. Because if we add or
  269. remove a character all of the following blocks are also changed. So we must
  270. divide the file using the "context" i.e. a block starts and ends in one of the
predefined sequence of characters. So the problem is: which contexts
(sequences) do we define to separate the file into N parts?
  273. This is the 'roll' of the I<rolling hash>. It is a function of the N last
  274. inputs, in this case the 7 last characters. The result of the rolling hash
  275. function is uniformly spread between all valid output values. This makes the
  276. rolling hash some kind of I<pseudo-random> function whose output depends only
  277. on the last N characters. Since the output is supposed to be uniform, we can
  278. modulus BS and the expected values are 0 to BS-1 with the same probability.
  279. Let the blocksize (BS) be the length of file divided by the maximum signature
  280. length (i.e. 64). If we split the file each time the rolling hash mod BS gives
  281. BS-1 we get 64 blocks. This is not a good approach because if the length
  282. changes, blocksize changes also. So we cannot compare files with dissimilar
  283. sizes. One good approach is to take some 'predefined' blocksizes and choose the
  284. one that fits based on the file size. The blocksizes in ssdeep are C<3, 6, 12,
  285. ..., 3 * 2^i>.
  286. So this is the algorithm:
  287. =over
  288. =item *
  289. Given the file size we calculate an initial blocksize (BS).
  290. =item *
  291. For each character we calculate the rolling hash R. Its output value depends
  292. only on the 7 last characters sequence.
  293. =item *
  294. Each time C<R mod BS = BS-1> (we meet one of the trigger 7 characters
  295. sequences) we write down the I<traditional hash> of the current block and start
  296. another block.
  297. =back
  298. The pitfall is Rolling Hash is statistically uniform, but it does not mean it
  299. will give us exactly 64 blocks.
  300. =over
  301. =item *
Sometimes it will give us more than 64 blocks. In that case we will
  303. concatenate the trailing blocks.
  304. =item *
Sometimes it will give us fewer than 64 blocks. No problem, 64 is the maximum
  306. length, it can be less.
  307. =item *
Sometimes it will give us fewer than 32 blocks. In that case, we should try a
  309. half-size blocksize to get more blocks.
  310. =back
The I<traditional hash> is a conventional hash or checksum function. We use the
32-bit FNV-1a hash (L<SEE ALSO>). But its output is 32 bits, so we need to map it to a
  313. base-64 character alphabet. That is, we only use the 6 least significant bits
  314. of FNV-1a hash.
  315. =head2 Output
  316. The ssdeep hash has this shape: C<BS:hash1:hash2>
  317. =over
  318. =item B<BS>
  319. It is the blocksize. We can only compare hashes from the same blocksize.
  320. =item B<hash1>
  321. This is the concatenation of FNV-1a results (mapped to 64 characters) for each block in the file.
  322. =item B<hash2>
  323. This is the same that hash1 but using double the blocksize. We write this result
  324. because a small change can halve or double the blocksize. If this happens,
  325. we can compare at least one part of the two signatures.
  326. =back
  327. =head2 Comparison
  328. There are several algorithms to compare two strings. I have used the same that
  329. ssdeep uses for compatibility reasons. Only in certain cases, the result from
  330. this module is not the same as ssdeep compiled version. Please see
  331. L<DIFFERENCES> below for details.
  332. These are the steps for matching calculation:
  333. =over
  334. =item *
  335. The first step is to compare the block sizes. We only can compare hashes calculated
  336. for the same block size. In one ssdeep string we have both blocksize and double
blocksize hashes. So we try to match at least one of the hashes. If they have no
  338. common block sizes, the comparison returns 0.
  339. =item *
  340. Remove sequences of more than three equal characters. These same character
  341. sequences have little information about the file and bias the matching score.
  342. =item *
Test for a coincidence of at least 7 characters. This is the default, but this
value can be changed. If the longest common substring is not at least this
  345. length, the function returns 0. We expect a lot of collisions since we are
  346. mapping 32 bit FNV values into 64 character output. This is a way to remove
  347. false positives.
  348. =item *
  349. We use the Wagner-Fischer algorithm to compute the Levenshtein distance using
  350. these weights:
  351. =over
  352. =item *
  353. Same character: 0
  354. =item *
Addition or deletion: 1
  356. =item *
Substitution: 2
  358. =back
  359. =item *
  360. Following the original ssdeep algorithm we scale the value so the output be between 0
  361. and 100.
  362. =back
  363. =head1 INTERFACE
  364. This section describes the recommended interface for generating and comparing
  365. ssdeep fuzzy hashes.
  366. =over
  367. =item B<ssdeep_hash>
  368. Calculates the ssdeep hash of the input string.
  369. Usage:
  370. $hash = ssdeep_hash( $string );
  371. or in array context
  372. @hash = ssdeep_hash( $string );
  373. In scalar context it returns a
  374. hash with the format C<bs:hash1:hash2>. Being C<bs> the blocksize, C<hash1>
  375. the fuzzy hash for this blocksize and C<hash2> the hash for double blocksize.
  376. The maximum length of each hash is 64 characters.
  377. In array context it returns the same components above but in a 3 elements array.
  378. =item B<ssdeep_hash_file>
  379. Calculates the hash of a file.
  380. Usage:
  381. $hash = ssdeep_hash_file( "/tmp/malware1.exe" );
This is a convenience function. It returns the same as ssdeep_hash, in scalar or
array context.
  384. Since this function slurps the whole file into memory, you should not use it in
  385. big files. You should not use this module for big files, use libfuzzy wrapper
  386. instead (L<BUGS AND LIMITATIONS>).
  387. Returns B<undef> on errors.
  388. =item B<ssdeep_compare>
  389. Calculates the matching between two hashes.
  390. Usage. To compare two scalar hashes:
  391. $match = ssdeep_compare( $hashA, $hashB );
  392. To compare two hashes in array format:
  393. $match = ssdeep_compare( \@hashA, \@hashB );
  394. The default is to discard hashes with less than 7 characters common substring.
  395. To override this default and set this limit to any number you can use:
  396. $match = ssdeep_compare( $hashA, $hashB, 4 );
  397. The result is a matching score between 0 and 100. See L<Comparison> for
  398. algorithm details.
  399. =item B<ssdeep_dump_last>
  400. Returns an array with information of the last hash calculation. Useful for
  401. debugging or extended details.
  402. Usage after a calculation:
  403. $hash = ssdeep_hash_file( "/tmp/malware1.exe" );
  404. @details = ssdeep_dump_last();
  405. The output is an array of CSV values.
  406. ...
  407. 2,125870,187|245|110|27|190|66|97,1393131242,q
  408. 1,210575,13|216|13|115|29|52|208,4009217630,e
  409. 2,210575,13|216|13|115|29|52|208,4009217630,e
  410. 1,210730,61|231|220|179|40|89|210,1069791891,T
  411. 1,237707,45|66|251|98|56|138|91,4014305026,C
  412. ....
  413. Meaning of the output array:
  414. =over
  415. =item B<Field 1>
Part of the hash which is affected. 1 for the first part, 2 for the second part.
  417. =item B<Field 2>
  418. Offset of the file where the chunk ends.
  419. =item B<Field 3>
  420. Sequence of 7 characters that triggered the rolling hash.
  421. =item B<Field 4>
Value of the FNV (traditional) hash of the current chunk at this moment.
  423. =item B<Field 5>
  424. Character output to the fuzzy hash due to this rolling hash trigger.
  425. =back
  426. So we can read it this way:
  427. At byte 125870 of the input file, there is a sequence of these 7 characters:
  428. C<187 245 110 27 190 66 97>. That sequence triggered the second part of the
  429. hash. The FNV hash value of the current chunk is 1393131242 that maps to
  430. character C<q>.
  431. Or this way:
  432. From the 4th row I know the letter C<T> in the first hash comes from the
  433. chunk that started at 210575+1 (the one-starting row before) and ends at
  434. 210730. The whole FNV hash of this block was 1069791891.
  435. =back
  436. =head1 BUGS AND LIMITATIONS
  437. =over
  438. =item B<Small blocksize comparison>
  439. Original ssdeep limit the matching of small blocksize hashes. So when comparing
them the matching is limited by their size and is never 100%. This algorithm
does not behave that way: small-blocksize hashes are compared just like
big-blocksize ones.
  443. =item B<Performance>
  444. This is a Pure Perl implementation. The performance is far from optimal. To
  445. calculate hashes more efficiently, please use compiled software like libfuzzy
  446. bindings (L<SEE ALSO>).
  447. =item B<Test 64 bits systems>
  448. This module has not been tested in 64 bit systems yet.
  449. =back
  450. Please report any bugs or feature requests to
  451. C<bug-digest-ssdeep@rt.cpan.org>, or through the web interface at
  452. L<http://rt.cpan.org>.
  453. =head1 SEE ALSO
  454. =over
  455. =item Ssdeep's home page
  456. L<http://ssdeep.sourceforge.net/>
  457. =item Jesse Kornblum's original paper I<Identifying almost identical files using context triggered piecewise hashing>
  458. L<http://dfrws.org/2006/proceedings/12-Kornblum.pdf>
  459. =item I<Data::FuzzyHash> Perl binding of binary libfuzzy libraries
  460. L<https://github.com/hideo55/Data-FuzzyHash>
  461. =item Text::WagnerFischer - An implementation of the Wagner-Fischer edit distance.
  462. L<http://search.cpan.org/perldoc?Text%3A%3AWagnerFischer>
  463. =item FNV hash's description
  464. L<http://www.isthe.com/chongo/tech/comp/fnv/>
  465. =back
  466. =head1 AUTHOR
  467. Reinoso Guzman C<< <reinoso.guzman@gmail.com> >>
  468. =head1 LICENCE AND COPYRIGHT
  469. Copyright (c) 2013, Reinoso Guzman C<< <reinoso.guzman@gmail.com> >>. All rights reserved.
  470. This module is free software; you can redistribute it and/or
  471. modify it under the same terms as Perl itself. See L<perlartistic>.
  472. =head1 DISCLAIMER OF WARRANTY
  473. BECAUSE THIS SOFTWARE IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
  474. FOR THE SOFTWARE, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN
  475. OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
  476. PROVIDE THE SOFTWARE "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER
  477. EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
  478. WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE
  479. ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE SOFTWARE IS WITH
  480. YOU. SHOULD THE SOFTWARE PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL
  481. NECESSARY SERVICING, REPAIR, OR CORRECTION.
  482. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
  483. WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
  484. REDISTRIBUTE THE SOFTWARE AS PERMITTED BY THE ABOVE LICENCE, BE
  485. LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL,
  486. OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE
  487. THE SOFTWARE (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING
  488. RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A
  489. FAILURE OF THE SOFTWARE TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF
  490. SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
  491. SUCH DAMAGES.