

Description

There are several cards arranged in a row, and each card has an associated number of points. The points are given in the integer array cardPoints.

In one step, you can take one card from the beginning or from the end of the row. You have to take exactly k cards.

Your score is the sum of the points of the cards you have taken.

Given the integer array cardPoints and the integer k, return the maximum score you can obtain.

 

Example 1:

Input: cardPoints = [1,2,3,4,5,6,1], k = 3
Output: 12
Explanation: After the first step, your score will always be 1. However, choosing the rightmost card first will maximize your total score. The optimal strategy is to take the three cards on the right, giving a final score of 1 + 6 + 5 = 12.

Example 2:

Input: cardPoints = [2,2,2], k = 2
Output: 4
Explanation: Regardless of which two cards you take, your score will always be 4.

Example 3:

Input: cardPoints = [9,7,7,9,7,7,9], k = 7
Output: 55
Explanation: You have to take all the cards. Your score is the sum of points of all cards.

 

Constraints:

  • 1 <= cardPoints.length <= 10^5
  • 1 <= cardPoints[i] <= 10^4
  • 1 <= k <= cardPoints.length

Solutions
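
Taking k cards from the two ends leaves a contiguous window of exactly n - k cards untouched, so the score equals the total of cardPoints minus the sum of that window. Maximizing the score is therefore the same as finding the minimum-sum window of length n - k, which each solution below does in O(n) time with either prefix sums or a running window sum. Since the largest possible total is 10^5 * 10^4 = 10^9, it fits in a 32-bit signed integer, so plain int arithmetic is safe.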

Python3

from math import inf
from typing import List


class Solution:
    def maxScore(self, cardPoints: List[int], k: int) -> int:
        n = len(cardPoints)
        # s[i] holds the sum of the first i cards (prefix sums).
        s = [0] * (n + 1)
        for i in range(n):
            s[i + 1] = s[i] + cardPoints[i]
        # The n - k cards left behind form a contiguous window;
        # find the window of length n - k with the minimum sum.
        mi = inf
        for i in range(n):
            j = i + (n - k) - 1
            if j < n:
                mi = min(mi, s[j + 1] - s[i])
        return s[-1] - mi
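
A quick sanity check against the examples above (this driver is illustrative and not part of the LeetCode harness):

assert Solution().maxScore([1, 2, 3, 4, 5, 6, 1], 3) == 12
assert Solution().maxScore([2, 2, 2], 2) == 4
assert Solution().maxScore([9, 7, 7, 9, 7, 7, 9], 7) == 55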

Java

class Solution {
    public int maxScore(int[] cardPoints, int k) {
        int n = cardPoints.length;
        // s[i] holds the sum of the first i cards (prefix sums).
        int[] s = new int[n + 1];
        for (int i = 0; i < n; ++i) {
            s[i + 1] = s[i] + cardPoints[i];
        }
        // Find the minimum-sum window of length n - k: those are the cards left behind.
        int mi = Integer.MAX_VALUE;
        for (int i = 0; i < n; ++i) {
            int j = i + (n - k) - 1;
            if (j < n) {
                mi = Math.min(mi, s[j + 1] - s[i]);
            }
        }
        return s[n] - mi;
    }
}

C++

class Solution {
public:
    int maxScore(vector<int>& cardPoints, int k) {
        int n = cardPoints.size();
        // s[i] holds the sum of the first i cards (prefix sums).
        vector<int> s(n + 1);
        for (int i = 0; i < n; ++i) s[i + 1] = s[i] + cardPoints[i];
        // Find the minimum-sum window of length n - k: those are the cards left behind.
        int mi = INT_MAX;
        for (int i = 0; i < n; ++i) {
            int j = i + (n - k) - 1;
            if (j < n) mi = min(mi, s[j + 1] - s[i]);
        }
        return s[n] - mi;
    }
};

Go

func maxScore(cardPoints []int, k int) int {
	n := len(cardPoints)
	// s[i] holds the sum of the first i cards (prefix sums).
	s := make([]int, n+1)
	for i := 0; i < n; i++ {
		s[i+1] = s[i] + cardPoints[i]
	}
	// Find the minimum-sum window of length n-k: those are the cards left behind.
	// math.MaxInt64 is a sentinel larger than any window sum (needs the "math" import).
	mi := math.MaxInt64
	for i := 0; i < n; i++ {
		j := i + (n - k) - 1
		if j < n {
			mi = min(mi, s[j+1]-s[i])
		}
	}
	return s[n] - mi
}

func min(a, b int) int {
	if a < b {
		return a
	}
	return b
}
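
Since Go 1.21, min for ordered types is a language built-in, so the helper above is only needed on older toolchains.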

TypeScript

function maxScore(cardPoints: number[], k: number): number {
    const n = cardPoints.length;
    // Start with the leftmost window of the n - k cards left behind.
    let sum = cardPoints.slice(0, n - k).reduce((r, v) => r + v, 0);
    let min = sum;
    // Slide the window one step right at a time, tracking the minimum window sum.
    for (let i = 0; i < k; i++) {
        sum += cardPoints[n - k + i] - cardPoints[i];
        min = Math.min(min, sum);
    }
    return cardPoints.reduce((r, v) => r + v, 0) - min;
}
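
Unlike the prefix-sum versions above, this one maintains a running sum of a window of length n - k as it slides, avoiding the prefix array entirely; the Rust solution below takes the same approach.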

Rust

impl Solution {
    pub fn max_score(card_points: Vec<i32>, k: i32) -> i32 {
        let (k, n) = (k as usize, card_points.len());
        // Start with the leftmost window of the n - k cards left behind.
        let mut sum = card_points.iter().take(n - k).sum::<i32>();
        let mut min = sum;
        // Slide the window one step right at a time, tracking the minimum window sum.
        for i in 0..k {
            sum += card_points[n - k + i] - card_points[i];
            min = min.min(sum);
        }
        card_points.iter().sum::<i32>() - min
    }
}
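
All of the solutions above run in O(n) time; the prefix-sum versions use O(n) extra space, while the two sliding-window versions get by with O(1).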

...